diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala new file mode 100644 index 000000000..6fe6f5460 --- /dev/null +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala @@ -0,0 +1,29 @@ +/** + * File automatically generated by `typo` for its own test suite. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN + */ +package anorm +package testdb +package hardcoded + +import java.sql.Connection +import resource.managed + +object ExecuteReturningSyntax { + /* add executeReturning to anorm. it needs to be inside the package, because everything is hidden */ + implicit class Ops(batchSql: BatchSql) { + def executeReturning[T](parser: ResultSetParser[T])(implicit c: Connection): T = + managed(batchSql.getFilledStatement(c, getGeneratedKeys = true))(using StatementResource, statementClassTag).acquireAndGet { ps => + ps.executeBatch() + Sql + .asTry( + parser, + managed(ps.getGeneratedKeys)(using ResultSetResource, resultSetClassTag), + onFirstRow = false, + ColumnAliaser.empty + ) + .get + } + } +} diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 5e479597f..87d9280d4 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -29,4 +29,7 @@ trait PersonRepo { def update(row: PersonRow)(implicit c: Connection): Boolean def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 41b54c8cd..9adcf1c63 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package compositepk package person +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import testdb.hardcoded.customtypes.Defaulted import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -148,4 +150,40 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("one", ParameterValue(row.one, null, ToStatement.longToStatement)), + NamedParameter("two", ParameterValue(row.two, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into compositepk.person("one", "two", "name") + values ({one}::int8, {two}, {name}) + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + returning "one", "two", "name" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like compositepk.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("one", "two", "name") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 2c1451f0f..12d1979be 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -100,4 +100,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 0f0a9a407..7fba23c57 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -29,4 +29,7 @@ trait FootballClubRepo { def update(row: FootballClubRow)(implicit c: Connection): Boolean def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: FootballClubRow)(implicit c: Connection): FootballClubRow + def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 8d8836da5..116c3e040 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package football_club +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -16,6 +17,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -130,4 +132,39 @@ class FootballClubRepoImpl extends FootballClubRepo { .executeInsert(FootballClubRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] = { + def toNamedParameter(row: FootballClubRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, FootballClubId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.football_club("id", "name") + values ({id}::int8, {name}) + on conflict ("id") + do update set + "name" = EXCLUDED."name" + returning "id", "name" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(FootballClubRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".execute(): @nowarn + streamingInsert(s"""copy football_club_TEMP("id", "name") from stdin""", batchSize, unsaved)(FootballClubRow.text, c): @nowarn + SQL"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index e0fe8f1bd..37d177d06 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -97,4 +97,17 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index cf8ea88c3..0afd9d893 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -27,4 +27,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId])(implicit c: Connection): Map[MaritalStatusId, MaritalStatusRow] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow)(implicit c: Connection): MaritalStatusRow + def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index 9bec11ce6..256f6c94c 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package marital_status +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -15,6 +16,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -96,11 +98,42 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${ParameterValue(unsaved.id, null, MaritalStatusId.toStatement)}::int8 ) on conflict ("id") - do update set - + do nothing returning "id" """ .executeInsert(MaritalStatusRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] = { + def toNamedParameter(row: MaritalStatusRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, MaritalStatusId.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.marital_status("id") + values ({id}::int8) + on conflict ("id") + do nothing + returning "id" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(MaritalStatusRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".execute(): @nowarn + streamingInsert(s"""copy marital_status_TEMP("id") from stdin""", batchSize, unsaved)(MaritalStatusRow.text, c): @nowarn + SQL"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index 9b4c2bb8d..cf8a6121d 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -72,4 +72,17 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index d3c6b5bd6..b8c43ba30 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -32,4 +32,7 @@ trait PersonRepo { def update(row: PersonRow)(implicit c: Connection): Boolean def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index 0afad0da3..ecb767f39 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package person +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import testdb.hardcoded.customtypes.Defaulted import testdb.hardcoded.myschema.football_club.FootballClubId import testdb.hardcoded.myschema.marital_status.MaritalStatusId @@ -231,4 +233,69 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, PersonId.toStatement)), + NamedParameter("favourite_football_club_id", ParameterValue(row.favouriteFootballClubId, null, FootballClubId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)), + NamedParameter("nick_name", ParameterValue(row.nickName, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("blog_url", ParameterValue(row.blogUrl, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("email", ParameterValue(row.email, null, ToStatement.stringToStatement)), + NamedParameter("phone", ParameterValue(row.phone, null, ToStatement.stringToStatement)), + NamedParameter("likes_pizza", ParameterValue(row.likesPizza, null, ToStatement.booleanToStatement)), + NamedParameter("marital_status_id", ParameterValue(row.maritalStatusId, null, MaritalStatusId.toStatement)), + NamedParameter("work_email", ParameterValue(row.workEmail, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("sector", ParameterValue(row.sector, null, Sector.toStatement)), + NamedParameter("favorite_number", ParameterValue(row.favoriteNumber, null, Number.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + values ({id}::int8, {favourite_football_club_id}, {name}, {nick_name}, {blog_url}, {email}, {phone}, {likes_pizza}, {marital_status_id}, {work_email}, {sector}::myschema.sector, {favorite_number}::myschema.number) + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", 
+ "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like myschema.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index f74fcf88e..bb9994c80 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -129,4 +129,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala new file mode 100644 index 000000000..6fe6f5460 --- /dev/null +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala @@ -0,0 +1,29 @@ +/** + * File automatically generated by `typo` for its own test suite. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN + */ +package anorm +package testdb +package hardcoded + +import java.sql.Connection +import resource.managed + +object ExecuteReturningSyntax { + /* add executeReturning to anorm. it needs to be inside the package, because everything is hidden */ + implicit class Ops(batchSql: BatchSql) { + def executeReturning[T](parser: ResultSetParser[T])(implicit c: Connection): T = + managed(batchSql.getFilledStatement(c, getGeneratedKeys = true))(using StatementResource, statementClassTag).acquireAndGet { ps => + ps.executeBatch() + Sql + .asTry( + parser, + managed(ps.getGeneratedKeys)(using ResultSetResource, resultSetClassTag), + onFirstRow = false, + ColumnAliaser.empty + ) + .get + } + } +} diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 5e479597f..87d9280d4 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -29,4 +29,7 @@ trait PersonRepo { def update(row: PersonRow)(implicit c: Connection): Boolean def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 41b54c8cd..9adcf1c63 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package compositepk package person +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import testdb.hardcoded.customtypes.Defaulted import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -148,4 +150,40 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("one", ParameterValue(row.one, null, ToStatement.longToStatement)), + NamedParameter("two", ParameterValue(row.two, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into compositepk.person("one", "two", "name") + values ({one}::int8, {two}, {name}) + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + returning "one", "two", "name" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like compositepk.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("one", "two", "name") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 2c1451f0f..12d1979be 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -100,4 +100,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 0f0a9a407..7fba23c57 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -29,4 +29,7 @@ trait FootballClubRepo { def update(row: FootballClubRow)(implicit c: Connection): Boolean def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: FootballClubRow)(implicit c: Connection): FootballClubRow + def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 8d8836da5..116c3e040 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package football_club +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -16,6 +17,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -130,4 +132,39 @@ class FootballClubRepoImpl extends FootballClubRepo { .executeInsert(FootballClubRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] = { + def toNamedParameter(row: FootballClubRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, FootballClubId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.football_club("id", "name") + values ({id}::int8, {name}) + on conflict ("id") + do update set + "name" = EXCLUDED."name" + returning "id", "name" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(FootballClubRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".execute(): @nowarn + streamingInsert(s"""copy football_club_TEMP("id", "name") from stdin""", batchSize, unsaved)(FootballClubRow.text, c): @nowarn + SQL"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index e0fe8f1bd..37d177d06 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -97,4 +97,17 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index cf8ea88c3..0afd9d893 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -27,4 +27,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId])(implicit c: Connection): Map[MaritalStatusId, MaritalStatusRow] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow)(implicit c: Connection): MaritalStatusRow + def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index 9bec11ce6..256f6c94c 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package marital_status +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -15,6 +16,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -96,11 +98,42 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${ParameterValue(unsaved.id, null, MaritalStatusId.toStatement)}::int8 ) on conflict ("id") - do update set - + do nothing returning "id" """ .executeInsert(MaritalStatusRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] = { + def toNamedParameter(row: MaritalStatusRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, MaritalStatusId.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.marital_status("id") + values ({id}::int8) + on conflict ("id") + do nothing + returning "id" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(MaritalStatusRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".execute(): @nowarn + streamingInsert(s"""copy marital_status_TEMP("id") from stdin""", batchSize, unsaved)(MaritalStatusRow.text, c): @nowarn + SQL"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index 9b4c2bb8d..cf8a6121d 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -72,4 +72,17 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index d3c6b5bd6..b8c43ba30 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -32,4 +32,7 @@ trait PersonRepo { def update(row: PersonRow)(implicit c: Connection): Boolean def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index 0afad0da3..ecb767f39 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package person +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import testdb.hardcoded.customtypes.Defaulted import testdb.hardcoded.myschema.football_club.FootballClubId import testdb.hardcoded.myschema.marital_status.MaritalStatusId @@ -231,4 +233,69 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, PersonId.toStatement)), + NamedParameter("favourite_football_club_id", ParameterValue(row.favouriteFootballClubId, null, FootballClubId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)), + NamedParameter("nick_name", ParameterValue(row.nickName, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("blog_url", ParameterValue(row.blogUrl, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("email", ParameterValue(row.email, null, ToStatement.stringToStatement)), + NamedParameter("phone", ParameterValue(row.phone, null, ToStatement.stringToStatement)), + NamedParameter("likes_pizza", ParameterValue(row.likesPizza, null, ToStatement.booleanToStatement)), + NamedParameter("marital_status_id", ParameterValue(row.maritalStatusId, null, MaritalStatusId.toStatement)), + NamedParameter("work_email", ParameterValue(row.workEmail, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("sector", ParameterValue(row.sector, null, Sector.toStatement)), + NamedParameter("favorite_number", ParameterValue(row.favoriteNumber, null, Number.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + values ({id}::int8, {favourite_football_club_id}, {name}, {nick_name}, {blog_url}, {email}, {phone}, {likes_pizza}, {marital_status_id}, {work_email}, {sector}::myschema.sector, {favorite_number}::myschema.number) + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + 
"name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like myschema.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index f74fcf88e..bb9994c80 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -129,4 +129,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 300cd52c6..66c742ff9 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -30,4 +30,7 @@ trait PersonRepo { def update(row: PersonRow): ConnectionIO[Boolean] def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 742b01d59..4ecacd856 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -9,6 +9,7 @@ package compositepk package person import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -18,6 +19,7 @@ import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import testdb.hardcoded.customtypes.Defaulted import typo.dsl.DeleteBuilder @@ -130,4 +132,29 @@ class PersonRepoImpl extends PersonRepo { returning "one", "two", "name" """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into compositepk.person("one", "two", "name") + values (?::int8,?,?) + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + returning "one", "two", "name"""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("one", "two", "name")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like compositepk.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("one", "two", "name") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 0c56cce4d..a9d2f1be4 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -120,4 +120,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala index cdfb0c8c2..47c967931 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala @@ -11,6 +11,7 @@ package person import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -56,4 +57,20 @@ object PersonRow { sb.append(Text.DELIMETER) Text.option(Text.stringInstance).unsafeEncode(row.name, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((Meta.LongMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable)), + toList = x => List(x.one, x.two, x.name), + unsafeSet = (rs, i, a) => { + Meta.LongMeta.put.unsafeSetNonNullable(rs, i + 0, a.one) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 1, a.two) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.name) + }, + unsafeUpdate = (ps, i, a) => { + Meta.LongMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.one) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 1, a.two) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.name) + } + ) } diff --git 
a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 34e5502bb..48efb3aea 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -30,4 +30,7 @@ trait FootballClubRepo { def update(row: FootballClubRow): ConnectionIO[Boolean] def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: FootballClubRow): ConnectionIO[FootballClubRow] + def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 0c0b7adfc..c435bdc80 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -9,6 +9,7 @@ package myschema package football_club import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -107,4 +109,29 @@ class FootballClubRepoImpl extends FootballClubRepo { returning "id", "name" """.query(using FootballClubRow.read).unique } + override def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] = { + Update[FootballClubRow]( + s"""insert into myschema.football_club("id", "name") + values (?::int8,?) + on conflict ("id") + do update set + "name" = EXCLUDED."name" + returning "id", "name"""" + )(using FootballClubRow.write) + .updateManyWithGeneratedKeys[FootballClubRow]("id", "name")(unsaved)(using catsStdInstancesForList, FootballClubRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".update.run + _ <- new FragmentOps(sql"""copy football_club_TEMP("id", "name") from stdin""").copyIn(unsaved, batchSize)(using FootballClubRow.text) + res <- sql"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index 40df5f181..de93115fd 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -115,4 +115,23 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo unsaved } } + override def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala index 3b8985a4f..eaaac16fb 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala @@ -11,6 +11,7 @@ package football_club import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -42,4 +43,17 @@ object FootballClubRow { sb.append(Text.DELIMETER) Text.stringInstance.unsafeEncode(row.name, sb) } + implicit lazy val write: Write[FootballClubRow] = new Write[FootballClubRow]( + puts = List((FootballClubId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls)), + toList = x => List(x.id, x.name), + unsafeSet = (rs, i, a) => { + FootballClubId.put.unsafeSetNonNullable(rs, i + 0, a.id) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.name) + }, + unsafeUpdate = (ps, i, a) => { + FootballClubId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + } + ) } diff --git 
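upsertBatch is built on doobie's Update#updateManyWithGeneratedKeys, which executes the single insert ... on conflict statement as a JDBC batch and streams back the rows produced by the returning clause. A small usage sketch, again assuming an illustrative IO Transactor named xa:

  import cats.effect.IO
  import doobie.implicits._
  import doobie.util.transactor.Transactor
  import testdb.hardcoded.myschema.football_club.{FootballClubRepoImpl, FootballClubRow}

  def upsertAll(xa: Transactor[IO], rows: List[FootballClubRow]): IO[List[FootballClubRow]] = {
    val repo = new FootballClubRepoImpl
    // one prepared statement sent as a JDBC batch; the `returning` rows come back as a stream
    repo.upsertBatch(rows).compile.toList.transact(xa)
  }
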
a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index 29564dfea..4307c05f1 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -28,4 +28,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId]): ConnectionIO[Map[MaritalStatusId, MaritalStatusRow]] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow): ConnectionIO[MaritalStatusRow] + def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index c2253807d..bed0f0dc0 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -8,12 +8,14 @@ package hardcoded package myschema package marital_status +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragments +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -74,9 +76,31 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${fromWrite(unsaved.id)(Write.fromPut(MaritalStatusId.put))}::int8 ) on conflict ("id") - do update set - + do nothing returning "id" """.query(using MaritalStatusRow.read).unique } + override def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] = { + Update[MaritalStatusRow]( + s"""insert into myschema.marital_status("id") + values (?::int8) + on conflict ("id") + do nothing + returning "id"""" + )(using MaritalStatusRow.write) + .updateManyWithGeneratedKeys[MaritalStatusRow]("id")(unsaved)(using catsStdInstancesForList, MaritalStatusRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".update.run + _ <- new FragmentOps(sql"""copy marital_status_TEMP("id") from stdin""").copyIn(unsaved, batchSize)(using MaritalStatusRow.text) + res <- sql"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index 15191cb11..31d51191d 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -86,4 +86,23 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M unsaved } } + override def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala index da439766a..d2a9981c8 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala @@ -11,6 +11,7 @@ package marital_status import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -35,4 +36,14 @@ object MaritalStatusRow { implicit lazy val text: Text[MaritalStatusRow] = Text.instance[MaritalStatusRow]{ (row, sb) => MaritalStatusId.text.unsafeEncode(row.id, sb) } + implicit lazy val write: Write[MaritalStatusRow] = new Write[MaritalStatusRow]( + puts = List((MaritalStatusId.put, Nullability.NoNulls)), + toList = x => List(x.id), + unsafeSet = (rs, i, a) => { + MaritalStatusId.put.unsafeSetNonNullable(rs, i + 0, a.id) + }, + unsafeUpdate = (ps, i, a) => { + MaritalStatusId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + } + ) } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala 
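marital_status consists of nothing but its primary key, so the generated upsert has no columns to assign in `do update set`; the diff therefore switches it to `on conflict ... do nothing`. One Postgres detail worth keeping in mind (illustrative doobie snippet, not generated code): with DO NOTHING the returning clause only yields rows that were actually inserted, so a conflicting key produces no row at all.

  import doobie.implicits._

  val id = 42L
  val insertIfAbsent: doobie.ConnectionIO[Option[Long]] =
    sql"""insert into myschema.marital_status("id")
          values ($id::int8)
          on conflict ("id") do nothing
          returning "id"
       """.query[Long].option   // None when the id already existed
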
b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index 5e9fe04f9..9db299d5d 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -33,4 +33,7 @@ trait PersonRepo { def update(row: PersonRow): ConnectionIO[Boolean] def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index cbcdf4cd8..b499a6d13 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -9,6 +9,7 @@ package myschema package person import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -18,6 +19,7 @@ import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import testdb.hardcoded.customtypes.Defaulted import testdb.hardcoded.myschema.football_club.FootballClubId @@ -207,4 +209,49 @@ class PersonRepoImpl extends PersonRepo { returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number" """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + values (?::int8,?,?,?,?,?,?,?,?,?,?::myschema.sector,?::myschema.number) + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number"""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("id", "favourite_football_club_id", "name", "nick_name", 
"blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like myschema.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index 3bcd444f7..75ec1f051 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -151,4 +151,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala index b8664a1ca..ffbdd51d9 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala @@ -11,6 +11,7 @@ package person import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -104,4 +105,47 @@ object PersonRow { sb.append(Text.DELIMETER) Number.text.unsafeEncode(row.favoriteNumber, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((PersonId.put, Nullability.NoNulls), + (FootballClubId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.BooleanMeta.put, Nullability.NoNulls), + (MaritalStatusId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Sector.put, Nullability.NoNulls), + (Number.put, Nullability.NoNulls)), + toList = x => List(x.id, x.favouriteFootballClubId, x.name, x.nickName, x.blogUrl, x.email, x.phone, x.likesPizza, x.maritalStatusId, x.workEmail, x.sector, x.favoriteNumber), + unsafeSet = (rs, i, a) => { + PersonId.put.unsafeSetNonNullable(rs, i + 0, a.id) + FootballClubId.put.unsafeSetNonNullable(rs, i + 1, a.favouriteFootballClubId) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.name) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 3, a.nickName) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.blogUrl) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.email) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 6, a.phone) + Meta.BooleanMeta.put.unsafeSetNonNullable(rs, i + 7, a.likesPizza) + MaritalStatusId.put.unsafeSetNonNullable(rs, i + 8, a.maritalStatusId) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 9, a.workEmail) + Sector.put.unsafeSetNonNullable(rs, i + 10, a.sector) + Number.put.unsafeSetNonNullable(rs, i + 11, a.favoriteNumber) + }, + unsafeUpdate = (ps, i, a) => { + PersonId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + FootballClubId.put.unsafeUpdateNonNullable(ps, i + 1, a.favouriteFootballClubId) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.name) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 3, a.nickName) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.blogUrl) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.email) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.phone) + Meta.BooleanMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.likesPizza) + MaritalStatusId.put.unsafeUpdateNonNullable(ps, i + 8, a.maritalStatusId) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 9, a.workEmail) + Sector.put.unsafeUpdateNonNullable(ps, i + 10, 
a.sector) + Number.put.unsafeUpdateNonNullable(ps, i + 11, a.favoriteNumber) + } + ) } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 300cd52c6..66c742ff9 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -30,4 +30,7 @@ trait PersonRepo { def update(row: PersonRow): ConnectionIO[Boolean] def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 742b01d59..4ecacd856 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -9,6 +9,7 @@ package compositepk package person import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -18,6 +19,7 @@ import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import testdb.hardcoded.customtypes.Defaulted import typo.dsl.DeleteBuilder @@ -130,4 +132,29 @@ class PersonRepoImpl extends PersonRepo { returning "one", "two", "name" """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into compositepk.person("one", "two", "name") + values (?::int8,?,?) + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + returning "one", "two", "name"""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("one", "two", "name")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like compositepk.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("one", "two", "name") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 0c56cce4d..a9d2f1be4 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -120,4 +120,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala index cdfb0c8c2..47c967931 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala @@ -11,6 +11,7 @@ package person import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -56,4 +57,20 @@ object PersonRow { sb.append(Text.DELIMETER) Text.option(Text.stringInstance).unsafeEncode(row.name, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((Meta.LongMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable)), + toList = x => List(x.one, x.two, x.name), + unsafeSet = (rs, i, a) => { + Meta.LongMeta.put.unsafeSetNonNullable(rs, i + 0, a.one) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 1, a.two) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.name) + }, + unsafeUpdate = (ps, i, a) => { + Meta.LongMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.one) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 1, a.two) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.name) + } + ) } diff --git 
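The Write instances in this diff spell out the column layout and nullability directly (puts, toList, unsafeSet, unsafeUpdate). For a row shaped like the compositepk PersonRow, an equivalent instance can usually be obtained by contramapping doobie's derived tuple instance instead; a sketch with a stand-in case class, assuming Write#contramap and the tuple instances are available in the doobie version used here (nothing below is generated code):

  import doobie.util.Write

  case class Row(one: Long, two: Option[String], name: Option[String])  // stand-in for PersonRow
  val rowWrite: Write[Row] =
    Write[(Long, Option[String], Option[String])].contramap(r => (r.one, r.two, r.name))
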
a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 34e5502bb..48efb3aea 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -30,4 +30,7 @@ trait FootballClubRepo { def update(row: FootballClubRow): ConnectionIO[Boolean] def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: FootballClubRow): ConnectionIO[FootballClubRow] + def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 0c0b7adfc..c435bdc80 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -9,6 +9,7 @@ package myschema package football_club import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -107,4 +109,29 @@ class FootballClubRepoImpl extends FootballClubRepo { returning "id", "name" """.query(using FootballClubRow.read).unique } + override def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] = { + Update[FootballClubRow]( + s"""insert into myschema.football_club("id", "name") + values (?::int8,?) + on conflict ("id") + do update set + "name" = EXCLUDED."name" + returning "id", "name"""" + )(using FootballClubRow.write) + .updateManyWithGeneratedKeys[FootballClubRow]("id", "name")(unsaved)(using catsStdInstancesForList, FootballClubRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".update.run + _ <- new FragmentOps(sql"""copy football_club_TEMP("id", "name") from stdin""").copyIn(unsaved, batchSize)(using FootballClubRow.text) + res <- sql"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index 40df5f181..de93115fd 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -115,4 +115,23 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo unsaved } } + override def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala index 3b8985a4f..eaaac16fb 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala @@ -11,6 +11,7 @@ package football_club import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -42,4 +43,17 @@ object FootballClubRow { sb.append(Text.DELIMETER) Text.stringInstance.unsafeEncode(row.name, sb) } + implicit lazy val write: Write[FootballClubRow] = new Write[FootballClubRow]( + puts = List((FootballClubId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls)), + toList = x => List(x.id, x.name), + unsafeSet = (rs, i, a) => { + FootballClubId.put.unsafeSetNonNullable(rs, i + 0, a.id) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.name) + }, + unsafeUpdate = (ps, i, a) => { + FootballClubId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + } + ) } diff --git 
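The mock repositories above implement upsertStreaming by draining the stream, storing each row in the in-memory map and returning how many rows went past. The same behaviour can be written directly against fs2; a small illustrative helper (the name and signature are not part of the diff):

  import cats.effect.Sync
  import fs2.Stream

  def storeAndCount[F[_]: Sync, A](rows: Stream[F, A])(store: A => Unit): F[Long] =
    rows.evalTap(a => Sync[F].delay(store(a))).compile.count  // count of rows after the side effect
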
a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index 29564dfea..4307c05f1 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -28,4 +28,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId]): ConnectionIO[Map[MaritalStatusId, MaritalStatusRow]] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow): ConnectionIO[MaritalStatusRow] + def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index c2253807d..bed0f0dc0 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -8,12 +8,14 @@ package hardcoded package myschema package marital_status +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragments +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -74,9 +76,31 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${fromWrite(unsaved.id)(Write.fromPut(MaritalStatusId.put))}::int8 ) on conflict ("id") - do update set - + do nothing returning "id" """.query(using MaritalStatusRow.read).unique } + override def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] = { + Update[MaritalStatusRow]( + s"""insert into myschema.marital_status("id") + values (?::int8) + on conflict ("id") + do nothing + returning "id"""" + )(using MaritalStatusRow.write) + .updateManyWithGeneratedKeys[MaritalStatusRow]("id")(unsaved)(using catsStdInstancesForList, MaritalStatusRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".update.run + _ <- new FragmentOps(sql"""copy marital_status_TEMP("id") from stdin""").copyIn(unsaved, batchSize)(using MaritalStatusRow.text) + res <- sql"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index 15191cb11..31d51191d 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -86,4 +86,23 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M unsaved } } + override def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala index da439766a..d2a9981c8 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala @@ -11,6 +11,7 @@ package marital_status import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -35,4 +36,14 @@ object MaritalStatusRow { implicit lazy val text: Text[MaritalStatusRow] = Text.instance[MaritalStatusRow]{ (row, sb) => MaritalStatusId.text.unsafeEncode(row.id, sb) } + implicit lazy val write: Write[MaritalStatusRow] = new Write[MaritalStatusRow]( + puts = List((MaritalStatusId.put, Nullability.NoNulls)), + toList = x => List(x.id), + unsafeSet = (rs, i, a) => { + MaritalStatusId.put.unsafeSetNonNullable(rs, i + 0, a.id) + }, + unsafeUpdate = (ps, i, a) => { + MaritalStatusId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + } + ) } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala 
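upsertStreaming feeds rows to Postgres through COPY ... FROM STDIN, and the `(using Row.text)` argument is the doobie-postgres Text encoder that turns each row into one line of COPY's text format. A sketch of such an encoder in the same style as the generated instances, for a hypothetical two-column row:

  import doobie.postgres.Text

  case class Pair(a: String, b: Option[String])
  implicit val pairText: Text[Pair] = Text.instance[Pair] { (row, sb) =>
    Text.stringInstance.unsafeEncode(row.a, sb)
    sb.append(Text.DELIMETER)                              // column separator of COPY's text format
    Text.option(Text.stringInstance).unsafeEncode(row.b, sb)
  }
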
b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index 5e9fe04f9..9db299d5d 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -33,4 +33,7 @@ trait PersonRepo { def update(row: PersonRow): ConnectionIO[Boolean] def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index cbcdf4cd8..b499a6d13 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -9,6 +9,7 @@ package myschema package person import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -18,6 +19,7 @@ import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import testdb.hardcoded.customtypes.Defaulted import testdb.hardcoded.myschema.football_club.FootballClubId @@ -207,4 +209,49 @@ class PersonRepoImpl extends PersonRepo { returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number" """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + values (?::int8,?,?,?,?,?,?,?,?,?,?::myschema.sector,?::myschema.number) + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number"""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", 
"phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like myschema.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index 3bcd444f7..75ec1f051 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -151,4 +151,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala index b8664a1ca..ffbdd51d9 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala @@ -11,6 +11,7 @@ package person import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -104,4 +105,47 @@ object PersonRow { sb.append(Text.DELIMETER) Number.text.unsafeEncode(row.favoriteNumber, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((PersonId.put, Nullability.NoNulls), + (FootballClubId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.BooleanMeta.put, Nullability.NoNulls), + (MaritalStatusId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Sector.put, Nullability.NoNulls), + (Number.put, Nullability.NoNulls)), + toList = x => List(x.id, x.favouriteFootballClubId, x.name, x.nickName, x.blogUrl, x.email, x.phone, x.likesPizza, x.maritalStatusId, x.workEmail, x.sector, x.favoriteNumber), + unsafeSet = (rs, i, a) => { + PersonId.put.unsafeSetNonNullable(rs, i + 0, a.id) + FootballClubId.put.unsafeSetNonNullable(rs, i + 1, a.favouriteFootballClubId) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.name) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 3, a.nickName) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.blogUrl) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.email) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 6, a.phone) + Meta.BooleanMeta.put.unsafeSetNonNullable(rs, i + 7, a.likesPizza) + MaritalStatusId.put.unsafeSetNonNullable(rs, i + 8, a.maritalStatusId) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 9, a.workEmail) + Sector.put.unsafeSetNonNullable(rs, i + 10, a.sector) + Number.put.unsafeSetNonNullable(rs, i + 11, a.favoriteNumber) + }, + unsafeUpdate = (ps, i, a) => { + PersonId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + FootballClubId.put.unsafeUpdateNonNullable(ps, i + 1, a.favouriteFootballClubId) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.name) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 3, a.nickName) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.blogUrl) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.email) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.phone) + Meta.BooleanMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.likesPizza) + MaritalStatusId.put.unsafeUpdateNonNullable(ps, i + 8, a.maritalStatusId) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 9, a.workEmail) + Sector.put.unsafeUpdateNonNullable(ps, i + 10, a.sector) + 
Number.put.unsafeUpdateNonNullable(ps, i + 11, a.favoriteNumber) + } + ) } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 2a17943bf..d47e1b154 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -32,4 +32,7 @@ trait PersonRepo { def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 99e80552b..d91f13f40 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -123,4 +123,17 @@ class PersonRepoImpl extends PersonRepo { "name" = EXCLUDED."name" returning "one", "two", "name"""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like compositepk.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("one", "two", "name") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 4f70aa206..67b5eb064 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -119,4 +119,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 652845f0c..2f956fc30 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -32,4 +32,7 @@ trait FootballClubRepo { def update(row: FootballClubRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: FootballClubRow): ZIO[ZConnection, Throwable, UpdateResult[FootballClubRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 3a7a40b82..eddf925ac 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -103,4 +103,17 @@ class FootballClubRepoImpl extends FootballClubRepo { "name" = EXCLUDED."name" returning "id", "name"""".insertReturning(using FootballClubRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".execute + val copied = streamingInsert(s"""copy football_club_TEMP("id", "name") from stdin""", batchSize, unsaved)(FootballClubRow.text) + val merged = sql"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index 90089894b..6e1ceff71 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -116,4 +116,13 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index a12b10174..1d52df440 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -30,4 +30,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId]): ZIO[ZConnection, Throwable, Map[MaritalStatusId, MaritalStatusRow]] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow): ZIO[ZConnection, Throwable, UpdateResult[MaritalStatusRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index effe6fc4f..1801585d4 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -78,6 +78,19 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${Segment.paramSegment(unsaved.id)(MaritalStatusId.setter)}::int8 ) on conflict ("id") + do nothing returning "id"""".insertReturning(using MaritalStatusRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".execute + val copied = streamingInsert(s"""copy marital_status_TEMP("id") from stdin""", batchSize, unsaved)(MaritalStatusRow.text) + val merged = sql"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index c87c7a6b2..c473f43f0 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -87,4 +87,13 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index 7eb24cee5..db8b93dac 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -35,4 +35,7 @@ trait PersonRepo { def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
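Callers that only have an in-memory collection can still use the streaming variant declared below by lifting the collection into a stream first; a minimal, hypothetical sketch (`repo` and `people` are assumptions, not generated names):

  val repo: PersonRepo = ???                    // e.g. new PersonRepoImpl
  val people: List[PersonRow] = ???
  val upserted: ZIO[ZConnection, Throwable, Long] =
    repo.upsertStreaming(ZStream.fromIterable(people), batchSize = 10000)

As with the other repositories, run it inside an explicit transaction, because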
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index 47ce473aa..ede74147b 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -200,4 +200,27 @@ class PersonRepoImpl extends PersonRepo { "favorite_number" = EXCLUDED."favorite_number" returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number"""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like myschema.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index 880b46fef..cdd0e7829 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -150,4 +150,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 2a17943bf..d47e1b154 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -32,4 +32,7 @@ trait PersonRepo { def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 99e80552b..d91f13f40 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -123,4 +123,17 @@ class PersonRepoImpl extends PersonRepo { "name" = EXCLUDED."name" returning "one", "two", "name"""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like compositepk.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("one", "two", "name") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 4f70aa206..67b5eb064 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -119,4 +119,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 652845f0c..2f956fc30 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -32,4 +32,7 @@ trait FootballClubRepo { def update(row: FootballClubRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: FootballClubRow): ZIO[ZConnection, Throwable, UpdateResult[FootballClubRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 3a7a40b82..eddf925ac 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -103,4 +103,17 @@ class FootballClubRepoImpl extends FootballClubRepo { "name" = EXCLUDED."name" returning "id", "name"""".insertReturning(using FootballClubRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".execute + val copied = streamingInsert(s"""copy football_club_TEMP("id", "name") from stdin""", batchSize, unsaved)(FootballClubRow.text) + val merged = sql"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index 90089894b..6e1ceff71 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -116,4 +116,13 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index a12b10174..1d52df440 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -30,4 +30,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId]): ZIO[ZConnection, Throwable, Map[MaritalStatusId, MaritalStatusRow]] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow): ZIO[ZConnection, Throwable, UpdateResult[MaritalStatusRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index effe6fc4f..1801585d4 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -78,6 +78,19 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${Segment.paramSegment(unsaved.id)(MaritalStatusId.setter)}::int8 ) on conflict ("id") + do nothing returning "id"""".insertReturning(using MaritalStatusRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".execute + val copied = streamingInsert(s"""copy marital_status_TEMP("id") from stdin""", batchSize, unsaved)(MaritalStatusRow.text) + val merged = sql"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index c87c7a6b2..c473f43f0 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -87,4 +87,13 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index 7eb24cee5..db8b93dac 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -35,4 +35,7 @@ trait PersonRepo { def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index 47ce473aa..ede74147b 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -200,4 +200,27 @@ class PersonRepoImpl extends PersonRepo { "favorite_number" = EXCLUDED."favorite_number" returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number"""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like myschema.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index 880b46fef..cdd0e7829 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -150,4 +150,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/site-in/other-features/testing-with-stubs.md b/site-in/other-features/testing-with-stubs.md index cd97ab564..86f6df69d 100644 --- a/site-in/other-features/testing-with-stubs.md +++ b/site-in/other-features/testing-with-stubs.md @@ -107,6 +107,18 @@ class AddressRepoMock(toRow: Function1[AddressRowUnsaved, AddressRow], map.put(unsaved.addressid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[AddressRow])(implicit c: Connection): List[AddressRow] = { + unsaved.map { row => + map += (row.addressid -> row) + row + }.toList + } + override def upsertStreaming(unsaved: Iterator[AddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.addressid -> row) + } + unsaved.size + } } ``` diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala index 54d667ecb..e55859678 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala @@ -29,4 +29,7 @@ trait DepartmentRepo { def update: UpdateBuilder[DepartmentFields, DepartmentRow] def update(row: DepartmentRow)(implicit c: Connection): Boolean def upsert(unsaved: DepartmentRow)(implicit c: Connection): DepartmentRow + def upsertBatch(unsaved: Iterable[DepartmentRow])(implicit c: Connection): List[DepartmentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
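This trait now exposes two bulk upsert variants: `upsertBatch`, which issues one batched insert and returns the upserted rows, and `upsertStreaming`, which streams the rows through a temporary table with COPY and only returns a row count. A minimal, hypothetical calling sketch (the `repo`, `rows` and connection wiring are assumptions, not generated code):

  val repo: DepartmentRepo = ???                 // e.g. new DepartmentRepoImpl
  val rows: List[DepartmentRow] = ???
  implicit val c: java.sql.Connection = ???      // from your connection pool
  val returned: List[DepartmentRow] = repo.upsertBatch(rows)                // single batched statement
  val count: Int = repo.upsertStreaming(rows.iterator, batchSize = 10000)   // needs an explicit transaction

Only the streaming variant carries the transaction requirement, because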
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[DepartmentRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala index 38574bf28..269f9f2ff 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala @@ -10,6 +10,7 @@ package department import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -135,4 +137,45 @@ class DepartmentRepoImpl extends DepartmentRepo { .executeInsert(DepartmentRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[DepartmentRow])(implicit c: Connection): List[DepartmentRow] = { + def toNamedParameter(row: DepartmentRow): List[NamedParameter] = List( + NamedParameter("departmentid", ParameterValue(row.departmentid, null, DepartmentId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("groupname", ParameterValue(row.groupname, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + values ({departmentid}::int4, {name}::varchar, {groupname}::varchar, {modifieddate}::timestamp) + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + returning "departmentid", "name", "groupname", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(DepartmentRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
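The implementation below creates `department_TEMP` with `on commit drop`, streams the rows into it with COPY ... FROM STDIN, then merges into humanresources.department and drops the temp table; the returned Int is the update count of that final merge. A hypothetical transaction bracket around the call (everything except upsertStreaming itself is an assumption):

  implicit val c: java.sql.Connection = ???      // from your pool, dedicated to this work
  val repo = new DepartmentRepoImpl
  val departments: List[DepartmentRow] = ???
  c.setAutoCommit(false)
  try {
    val n = repo.upsertStreaming(departments.iterator, batchSize = 10000)
    c.commit()
    n
  } catch { case t: Throwable => c.rollback(); throw t }

Such a bracket is the caller's responsibility precisely because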
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[DepartmentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table department_TEMP (like humanresources.department) on commit drop".execute(): @nowarn + streamingInsert(s"""copy department_TEMP("departmentid", "name", "groupname", "modifieddate") from stdin""", batchSize, unsaved)(DepartmentRow.text, c): @nowarn + SQL"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + select * from department_TEMP + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table department_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala index f65fb7948..d663ba0a1 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala @@ -87,4 +87,17 @@ class DepartmentRepoMock(toRow: Function1[DepartmentRowUnsaved, DepartmentRow], map.put(unsaved.departmentid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[DepartmentRow])(implicit c: Connection): List[DepartmentRow] = { + unsaved.map { row => + map += (row.departmentid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[DepartmentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.departmentid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala index 70387571d..e9ff53ff6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala @@ -30,4 +30,7 @@ trait EmployeeRepo { def update: UpdateBuilder[EmployeeFields, EmployeeRow] def update(row: EmployeeRow)(implicit c: Connection): Boolean def upsert(unsaved: EmployeeRow)(implicit c: Connection): EmployeeRow + def upsertBatch(unsaved: Iterable[EmployeeRow])(implicit c: Connection): List[EmployeeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[EmployeeRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala index 50dcfdc9a..f18bc40e9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Flag +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -23,6 +24,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -200,4 +202,78 @@ class EmployeeRepoImpl extends EmployeeRepo { .executeInsert(EmployeeRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[EmployeeRow])(implicit c: Connection): List[EmployeeRow] = { + def toNamedParameter(row: EmployeeRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("nationalidnumber", ParameterValue(row.nationalidnumber, null, ToStatement.stringToStatement)), + NamedParameter("loginid", ParameterValue(row.loginid, null, ToStatement.stringToStatement)), + NamedParameter("jobtitle", ParameterValue(row.jobtitle, null, ToStatement.stringToStatement)), + NamedParameter("birthdate", ParameterValue(row.birthdate, null, TypoLocalDate.toStatement)), + NamedParameter("maritalstatus", ParameterValue(row.maritalstatus, null, ToStatement.stringToStatement)), + NamedParameter("gender", ParameterValue(row.gender, null, ToStatement.stringToStatement)), + NamedParameter("hiredate", ParameterValue(row.hiredate, null, TypoLocalDate.toStatement)), + NamedParameter("salariedflag", ParameterValue(row.salariedflag, null, Flag.toStatement)), + NamedParameter("vacationhours", ParameterValue(row.vacationhours, null, TypoShort.toStatement)), + NamedParameter("sickleavehours", ParameterValue(row.sickleavehours, null, TypoShort.toStatement)), + NamedParameter("currentflag", ParameterValue(row.currentflag, null, Flag.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("organizationnode", ParameterValue(row.organizationnode, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + values ({businessentityid}::int4, {nationalidnumber}, {loginid}, {jobtitle}, {birthdate}::date, 
{maritalstatus}::bpchar, {gender}::bpchar, {hiredate}::date, {salariedflag}::bool, {vacationhours}::int2, {sickleavehours}::int2, {currentflag}::bool, {rowguid}::uuid, {modifieddate}::timestamp, {organizationnode}) + on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + returning "businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate"::text, "maritalstatus", "gender", "hiredate"::text, "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate"::text, "organizationnode" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(EmployeeRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table employee_TEMP (like humanresources.employee) on commit drop".execute(): @nowarn + streamingInsert(s"""copy employee_TEMP("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") from stdin""", batchSize, unsaved)(EmployeeRow.text, c): @nowarn + SQL"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + select * from employee_TEMP + on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + ; + drop table employee_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala index 48f677140..85e6fda6f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala @@ -88,4 +88,17 @@ class EmployeeRepoMock(toRow: Function1[EmployeeRowUnsaved, EmployeeRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def 
upsertBatch(unsaved: Iterable[EmployeeRow])(implicit c: Connection): List[EmployeeRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala index ebe64177b..28efa9188 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala @@ -29,4 +29,7 @@ trait EmployeedepartmenthistoryRepo { def update: UpdateBuilder[EmployeedepartmenthistoryFields, EmployeedepartmenthistoryRow] def update(row: EmployeedepartmenthistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: EmployeedepartmenthistoryRow)(implicit c: Connection): EmployeedepartmenthistoryRow + def upsertBatch(unsaved: Iterable[EmployeedepartmenthistoryRow])(implicit c: Connection): List[EmployeedepartmenthistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[EmployeedepartmenthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala index 1a5e87af5..8d769c1d2 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.humanresources.department.DepartmentId import adventureworks.humanresources.shift.ShiftId import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -148,4 +150,45 @@ class EmployeedepartmenthistoryRepoImpl extends EmployeedepartmenthistoryRepo { .executeInsert(EmployeedepartmenthistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[EmployeedepartmenthistoryRow])(implicit c: Connection): List[EmployeedepartmenthistoryRow] = { + def toNamedParameter(row: EmployeedepartmenthistoryRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("departmentid", ParameterValue(row.departmentid, 
null, DepartmentId.toStatement)), + NamedParameter("shiftid", ParameterValue(row.shiftid, null, ShiftId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDate.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDate.toStatement, TypoLocalDate.parameterMetadata))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + values ({businessentityid}::int4, {departmentid}::int2, {shiftid}::int2, {startdate}::date, {enddate}::date, {modifieddate}::timestamp) + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "departmentid", "shiftid", "startdate"::text, "enddate"::text, "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(EmployeedepartmenthistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeedepartmenthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table employeedepartmenthistory_TEMP (like humanresources.employeedepartmenthistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy employeedepartmenthistory_TEMP("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") from stdin""", batchSize, unsaved)(EmployeedepartmenthistoryRow.text, c): @nowarn + SQL"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + select * from employeedepartmenthistory_TEMP + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeedepartmenthistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala index ed2c82024..18aecce70 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala @@ -87,4 +87,17 @@ class EmployeedepartmenthistoryRepoMock(toRow: Function1[Employeedepartmenthisto map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[EmployeedepartmenthistoryRow])(implicit c: Connection): List[EmployeedepartmenthistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
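The mock variant below just drains the iterator into the in-memory map, so no SQL (and no transaction) is involved. A hypothetical test sketch (names other than the repo method are assumptions); note that a Scala Iterator can only be traversed once, so measure the collection before turning it into an iterator if you need the count independently:

  val rows: Vector[EmployeedepartmenthistoryRow] = ???
  val repo: EmployeedepartmenthistoryRepo = ???   // e.g. an EmployeedepartmenthistoryRepoMock
  implicit val c: java.sql.Connection = null      // the mock never touches the connection
  val expected = rows.size
  val returned: Int = repo.upsertStreaming(rows.iterator, batchSize = 100)

The real implementation, by contrast, does need an open transaction, since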
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeedepartmenthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala index d3e9a2dc4..3c4d2d46a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala @@ -29,4 +29,7 @@ trait EmployeepayhistoryRepo { def update: UpdateBuilder[EmployeepayhistoryFields, EmployeepayhistoryRow] def update(row: EmployeepayhistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: EmployeepayhistoryRow)(implicit c: Connection): EmployeepayhistoryRow + def upsertBatch(unsaved: Iterable[EmployeepayhistoryRow])(implicit c: Connection): List[EmployeepayhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[EmployeepayhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala index fddf76313..95f564442 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -142,4 +144,46 @@ class EmployeepayhistoryRepoImpl extends EmployeepayhistoryRepo { .executeInsert(EmployeepayhistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[EmployeepayhistoryRow])(implicit c: Connection): List[EmployeepayhistoryRow] = { + def toNamedParameter(row: EmployeepayhistoryRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("ratechangedate", ParameterValue(row.ratechangedate, null, TypoLocalDateTime.toStatement)), + NamedParameter("rate", ParameterValue(row.rate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("payfrequency", ParameterValue(row.payfrequency, null, TypoShort.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + 
BatchSql( + s"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + values ({businessentityid}::int4, {ratechangedate}::timestamp, {rate}::numeric, {payfrequency}::int2, {modifieddate}::timestamp) + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "ratechangedate"::text, "rate", "payfrequency", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(EmployeepayhistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeepayhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table employeepayhistory_TEMP (like humanresources.employeepayhistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy employeepayhistory_TEMP("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") from stdin""", batchSize, unsaved)(EmployeepayhistoryRow.text, c): @nowarn + SQL"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + select * from employeepayhistory_TEMP + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeepayhistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala index 8aa642fb2..82a22249d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala @@ -87,4 +87,17 @@ class EmployeepayhistoryRepoMock(toRow: Function1[EmployeepayhistoryRowUnsaved, map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[EmployeepayhistoryRow])(implicit c: Connection): List[EmployeepayhistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeepayhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala index bdb936957..5df163c7a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala @@ -29,4 +29,7 @@ trait JobcandidateRepo { def update: UpdateBuilder[JobcandidateFields, JobcandidateRow] def update(row: JobcandidateRow)(implicit c: Connection): Boolean def upsert(unsaved: JobcandidateRow)(implicit c: Connection): JobcandidateRow + def upsertBatch(unsaved: Iterable[JobcandidateRow])(implicit c: Connection): List[JobcandidateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[JobcandidateRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala index 0b8b56197..5b56a7a65 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoXml import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -137,4 +139,45 @@ class JobcandidateRepoImpl extends JobcandidateRepo { .executeInsert(JobcandidateRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[JobcandidateRow])(implicit c: Connection): List[JobcandidateRow] = { + def toNamedParameter(row: JobcandidateRow): List[NamedParameter] = List( + NamedParameter("jobcandidateid", ParameterValue(row.jobcandidateid, null, JobcandidateId.toStatement)), + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("resume", ParameterValue(row.resume, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + values ({jobcandidateid}::int4, {businessentityid}::int4, 
{resume}::xml, {modifieddate}::timestamp) + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + returning "jobcandidateid", "businessentityid", "resume", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(JobcandidateRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[JobcandidateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table jobcandidate_TEMP (like humanresources.jobcandidate) on commit drop".execute(): @nowarn + streamingInsert(s"""copy jobcandidate_TEMP("jobcandidateid", "businessentityid", "resume", "modifieddate") from stdin""", batchSize, unsaved)(JobcandidateRow.text, c): @nowarn + SQL"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + select * from jobcandidate_TEMP + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table jobcandidate_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala index e3a4c4226..e5f65c7b8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala @@ -87,4 +87,17 @@ class JobcandidateRepoMock(toRow: Function1[JobcandidateRowUnsaved, Jobcandidate map.put(unsaved.jobcandidateid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[JobcandidateRow])(implicit c: Connection): List[JobcandidateRow] = { + unsaved.map { row => + map += (row.jobcandidateid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[JobcandidateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.jobcandidateid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala index e3a5bb1ba..fa27a24eb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala @@ -29,4 +29,7 @@ trait ShiftRepo { def update: UpdateBuilder[ShiftFields, ShiftRow] def update(row: ShiftRow)(implicit c: Connection): Boolean def upsert(unsaved: ShiftRow)(implicit c: Connection): ShiftRow + def upsertBatch(unsaved: Iterable[ShiftRow])(implicit c: Connection): List[ShiftRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ShiftRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala index e37a6cb66..5d48c68c7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoLocalTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -140,4 +142,48 @@ class ShiftRepoImpl extends ShiftRepo { .executeInsert(ShiftRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ShiftRow])(implicit c: Connection): List[ShiftRow] = { + def toNamedParameter(row: ShiftRow): List[NamedParameter] = List( + NamedParameter("shiftid", ParameterValue(row.shiftid, null, ShiftId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("starttime", ParameterValue(row.starttime, null, TypoLocalTime.toStatement)), + NamedParameter("endtime", ParameterValue(row.endtime, null, TypoLocalTime.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + values ({shiftid}::int4, {name}::varchar, {starttime}::time, {endtime}::time, {modifieddate}::timestamp) + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + returning "shiftid", "name", "starttime"::text, "endtime"::text, "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ShiftRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShiftRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table shift_TEMP (like humanresources.shift) on commit drop".execute(): @nowarn + streamingInsert(s"""copy shift_TEMP("shiftid", "name", "starttime", "endtime", "modifieddate") from stdin""", batchSize, unsaved)(ShiftRow.text, c): @nowarn + SQL"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + select * from shift_TEMP + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shift_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala index 851844c03..9b2ed3bcf 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala @@ -87,4 +87,17 @@ class ShiftRepoMock(toRow: Function1[ShiftRowUnsaved, ShiftRow], map.put(unsaved.shiftid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ShiftRow])(implicit c: Connection): List[ShiftRow] = { + unsaved.map { row => + map += (row.shiftid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShiftRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.shiftid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala new file mode 100644 index 000000000..dfb29023a --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.cardinal_number` + * Constraint: CHECK ((VALUE >= 0)) + */ +case class CardinalNumber(value: Int) +object CardinalNumber { + implicit lazy val arrayColumn: Column[Array[CardinalNumber]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[CardinalNumber]] = adventureworks.IntArrayToStatement.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CardinalNumber, Int] = Bijection[CardinalNumber, Int](_.value)(CardinalNumber.apply) + implicit lazy val column: Column[CardinalNumber] = Column.columnToInt.map(CardinalNumber.apply) + implicit lazy val ordering: Ordering[CardinalNumber] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[CardinalNumber] = new ParameterMetaData[CardinalNumber] { + override def sqlType: String = """"information_schema"."cardinal_number"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[CardinalNumber] = Reads.IntReads.map(CardinalNumber.apply) + implicit lazy val text: Text[CardinalNumber] = new Text[CardinalNumber] { + override def unsafeEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[CardinalNumber] = ToStatement.intToStatement.contramap(_.value) + implicit lazy val writes: Writes[CardinalNumber] = Writes.IntWrites.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala new file mode 100644 index 000000000..2edda1880 --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
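The new `information_schema` wrappers all follow one shape: a case class over the primitive, plus implicit `Column`, `ToStatement`, `ParameterMetaData`, `Ordering`, `Text` and play-json instances in the companion, so values can be read and bound without extra glue. A rough usage sketch for `CardinalNumber`, assuming an implicit `java.sql.Connection` is in scope; the query and function name are illustrative, not generated:

    import adventureworks.information_schema.CardinalNumber
    import anorm.{SqlParser, SqlStringInterpolation}
    import java.sql.Connection

    // information_schema.columns.ordinal_position has type cardinal_number, so the
    // implicit Column[CardinalNumber] from the companion parses it directly.
    def ordinalPositions(schema: String, table: String)(implicit c: Connection): List[CardinalNumber] =
      SQL"""select ordinal_position
            from information_schema.columns
            where table_schema = $schema and table_name = $table"""
        .as(SqlParser.scalar[CardinalNumber].*)
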
+ */ +package adventureworks +package information_schema + +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.character_data` + * No constraint + */ +case class CharacterData(value: String) +object CharacterData { + implicit lazy val arrayColumn: Column[Array[CharacterData]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[CharacterData]] = ToStatement.arrayToParameter(ParameterMetaData.StringParameterMetaData).contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CharacterData, String] = Bijection[CharacterData, String](_.value)(CharacterData.apply) + implicit lazy val column: Column[CharacterData] = Column.columnToString.map(CharacterData.apply) + implicit lazy val ordering: Ordering[CharacterData] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[CharacterData] = new ParameterMetaData[CharacterData] { + override def sqlType: String = """"information_schema"."character_data"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[CharacterData] = Reads.StringReads.map(CharacterData.apply) + implicit lazy val text: Text[CharacterData] = new Text[CharacterData] { + override def unsafeEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[CharacterData] = ToStatement.stringToStatement.contramap(_.value) + implicit lazy val writes: Writes[CharacterData] = Writes.StringWrites.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala new file mode 100644 index 000000000..9fb1d7b0a --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.sql_identifier` + * No constraint + */ +case class SqlIdentifier(value: String) +object SqlIdentifier { + implicit lazy val arrayColumn: Column[Array[SqlIdentifier]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[SqlIdentifier]] = ToStatement.arrayToParameter(ParameterMetaData.StringParameterMetaData).contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[SqlIdentifier, String] = Bijection[SqlIdentifier, String](_.value)(SqlIdentifier.apply) + implicit lazy val column: Column[SqlIdentifier] = Column.columnToString.map(SqlIdentifier.apply) + implicit lazy val ordering: Ordering[SqlIdentifier] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[SqlIdentifier] = new ParameterMetaData[SqlIdentifier] { + override def sqlType: String = """"information_schema"."sql_identifier"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[SqlIdentifier] = Reads.StringReads.map(SqlIdentifier.apply) + implicit lazy val text: Text[SqlIdentifier] = new Text[SqlIdentifier] { + override def unsafeEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[SqlIdentifier] = ToStatement.stringToStatement.contramap(_.value) + implicit lazy val writes: Writes[SqlIdentifier] = Writes.StringWrites.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala new file mode 100644 index 000000000..445ce7267 --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala @@ -0,0 +1,39 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import adventureworks.customtypes.TypoInstant +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.time_stamp` + * No constraint + */ +case class TimeStamp(value: TypoInstant) +object TimeStamp { + implicit lazy val arrayColumn: Column[Array[TimeStamp]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[TimeStamp]] = TypoInstant.arrayToStatement.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[TimeStamp, TypoInstant] = Bijection[TimeStamp, TypoInstant](_.value)(TimeStamp.apply) + implicit lazy val column: Column[TimeStamp] = TypoInstant.column.map(TimeStamp.apply) + implicit def ordering(implicit O0: Ordering[TypoInstant]): Ordering[TimeStamp] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[TimeStamp] = new ParameterMetaData[TimeStamp] { + override def sqlType: String = """"information_schema"."time_stamp"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[TimeStamp] = TypoInstant.reads.map(TimeStamp.apply) + implicit lazy val text: Text[TimeStamp] = new Text[TimeStamp] { + override def unsafeEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[TimeStamp] = TypoInstant.toStatement.contramap(_.value) + implicit lazy val writes: Writes[TimeStamp] = TypoInstant.writes.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala new file mode 100644 index 000000000..9e60bb7c0 --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
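One small difference in `TimeStamp` above: its `Ordering` is an `implicit def` that itself requires an `Ordering[TypoInstant]`, rather than a standalone `lazy val` like the other wrappers. A sketch of what that means for callers; whether an `Ordering[TypoInstant]` is already available is an assumption, so it is taken as a parameter here:

    import adventureworks.customtypes.TypoInstant
    import adventureworks.information_schema.TimeStamp

    // Newest-first sort: Ordering[TimeStamp] is derived from the supplied Ordering[TypoInstant].
    def newestFirst(stamps: List[TimeStamp])(implicit ev: Ordering[TypoInstant]): List[TimeStamp] =
      stamps.sorted(Ordering[TimeStamp].reverse)
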
+ */ +package adventureworks +package information_schema + +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.yes_or_no` + * Constraint: CHECK (((VALUE)::text = ANY ((ARRAY['YES'::character varying, 'NO'::character varying])::text[]))) + */ +case class YesOrNo(value: String) +object YesOrNo { + implicit lazy val arrayColumn: Column[Array[YesOrNo]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[YesOrNo]] = ToStatement.arrayToParameter(ParameterMetaData.StringParameterMetaData).contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[YesOrNo, String] = Bijection[YesOrNo, String](_.value)(YesOrNo.apply) + implicit lazy val column: Column[YesOrNo] = Column.columnToString.map(YesOrNo.apply) + implicit lazy val ordering: Ordering[YesOrNo] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[YesOrNo] = new ParameterMetaData[YesOrNo] { + override def sqlType: String = """"information_schema"."yes_or_no"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[YesOrNo] = Reads.StringReads.map(YesOrNo.apply) + implicit lazy val text: Text[YesOrNo] = new Text[YesOrNo] { + override def unsafeEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[YesOrNo] = ToStatement.stringToStatement.contramap(_.value) + implicit lazy val writes: Writes[YesOrNo] = Writes.StringWrites.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala index d6a3f76af..f31e96c50 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala @@ -29,4 +29,7 @@ trait AddressRepo { def update: UpdateBuilder[AddressFields, AddressRow] def update(row: AddressRow)(implicit c: Connection): Boolean def upsert(unsaved: AddressRow)(implicit c: Connection): AddressRow + def upsertBatch(unsaved: Iterable[AddressRow])(implicit c: Connection): List[AddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
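The `yes_or_no` domain admits only the literal strings 'YES' and 'NO' (see the CHECK constraint in the wrapper's scaladoc), and the generated case class does not validate its argument. A pair of illustrative helpers, not part of the generated code, for staying inside that constraint:

    import adventureworks.information_schema.YesOrNo

    def yesOrNo(b: Boolean): YesOrNo = YesOrNo(if (b) "YES" else "NO")
    def isYes(v: YesOrNo): Boolean = v.value == "YES"
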
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[AddressRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala index e4e757127..49ae20e01 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoBytea import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.stateprovince.StateprovinceId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -162,4 +164,60 @@ class AddressRepoImpl extends AddressRepo { .executeInsert(AddressRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[AddressRow])(implicit c: Connection): List[AddressRow] = { + def toNamedParameter(row: AddressRow): List[NamedParameter] = List( + NamedParameter("addressid", ParameterValue(row.addressid, null, AddressId.toStatement)), + NamedParameter("addressline1", ParameterValue(row.addressline1, null, ToStatement.stringToStatement)), + NamedParameter("addressline2", ParameterValue(row.addressline2, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("city", ParameterValue(row.city, null, ToStatement.stringToStatement)), + NamedParameter("stateprovinceid", ParameterValue(row.stateprovinceid, null, StateprovinceId.toStatement)), + NamedParameter("postalcode", ParameterValue(row.postalcode, null, ToStatement.stringToStatement)), + NamedParameter("spatiallocation", ParameterValue(row.spatiallocation, null, ToStatement.optionToStatement(TypoBytea.toStatement, TypoBytea.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + values ({addressid}::int4, {addressline1}, {addressline2}, {city}, {stateprovinceid}::int4, {postalcode}, {spatiallocation}::bytea, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + 
).executeReturning(AddressRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[AddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table address_TEMP (like person.address) on commit drop".execute(): @nowarn + streamingInsert(s"""copy address_TEMP("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(AddressRow.text, c): @nowarn + SQL"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + select * from address_TEMP + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table address_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala index 087842773..b6d6823ae 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala @@ -87,4 +87,17 @@ class AddressRepoMock(toRow: Function1[AddressRowUnsaved, AddressRow], map.put(unsaved.addressid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[AddressRow])(implicit c: Connection): List[AddressRow] = { + unsaved.map { row => + map += (row.addressid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[AddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.addressid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala index b2bd59f5c..226bcb2ae 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala @@ -29,4 +29,7 @@ trait AddresstypeRepo { def update: UpdateBuilder[AddresstypeFields, AddresstypeRow] def update(row: AddresstypeRow)(implicit c: Connection): Boolean def upsert(unsaved: AddresstypeRow)(implicit c: Connection): AddresstypeRow + def upsertBatch(unsaved: Iterable[AddresstypeRow])(implicit c: Connection): List[AddresstypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[AddresstypeRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala index 92550a804..4a45c73e1 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -139,4 +141,45 @@ class AddresstypeRepoImpl extends AddresstypeRepo { .executeInsert(AddresstypeRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[AddresstypeRow])(implicit c: Connection): List[AddresstypeRow] = { + def toNamedParameter(row: AddresstypeRow): List[NamedParameter] = List( + NamedParameter("addresstypeid", ParameterValue(row.addresstypeid, null, AddresstypeId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + values ({addresstypeid}::int4, {name}::varchar, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "addresstypeid", "name", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(AddresstypeRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[AddresstypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table addresstype_TEMP (like person.addresstype) on commit drop".execute(): @nowarn + streamingInsert(s"""copy addresstype_TEMP("addresstypeid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(AddresstypeRow.text, c): @nowarn + SQL"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + select * from addresstype_TEMP + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table addresstype_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala index 922431767..1b15838bc 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala @@ -87,4 +87,17 @@ class AddresstypeRepoMock(toRow: Function1[AddresstypeRowUnsaved, AddresstypeRow map.put(unsaved.addresstypeid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[AddresstypeRow])(implicit c: Connection): List[AddresstypeRow] = { + unsaved.map { row => + map += (row.addresstypeid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[AddresstypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.addresstypeid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala index 655e8b321..b77437aac 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala @@ -29,4 +29,7 @@ trait BusinessentityRepo { def update: UpdateBuilder[BusinessentityFields, BusinessentityRow] def update(row: BusinessentityRow)(implicit c: Connection): Boolean def upsert(unsaved: BusinessentityRow)(implicit c: Connection): BusinessentityRow + def upsertBatch(unsaved: Iterable[BusinessentityRow])(implicit c: Connection): List[BusinessentityRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[BusinessentityRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala index bc51a38ed..05ff38757 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala @@ -10,6 +10,7 @@ package businessentity import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -134,4 +136,42 @@ class BusinessentityRepoImpl extends BusinessentityRepo { .executeInsert(BusinessentityRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[BusinessentityRow])(implicit c: Connection): List[BusinessentityRow] = { + def toNamedParameter(row: BusinessentityRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + values ({businessentityid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(BusinessentityRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentityRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table businessentity_TEMP (like person.businessentity) on commit drop".execute(): @nowarn + streamingInsert(s"""copy businessentity_TEMP("businessentityid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentityRow.text, c): @nowarn + SQL"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + select * from businessentity_TEMP + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentity_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala index 17cc0fa77..8286a0f73 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala @@ -87,4 +87,17 @@ class BusinessentityRepoMock(toRow: Function1[BusinessentityRowUnsaved, Business map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[BusinessentityRow])(implicit c: Connection): List[BusinessentityRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentityRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala index 85611f4d3..6a659700b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala @@ -29,4 +29,7 @@ trait BusinessentityaddressRepo { def update: UpdateBuilder[BusinessentityaddressFields, BusinessentityaddressRow] def update(row: BusinessentityaddressRow)(implicit c: Connection): Boolean def upsert(unsaved: BusinessentityaddressRow)(implicit c: Connection): BusinessentityaddressRow + def upsertBatch(unsaved: Iterable[BusinessentityaddressRow])(implicit c: Connection): List[BusinessentityaddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
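A caller-visible quirk of the mock repositories above: their `upsertStreaming` first drains the iterator with `foreach` and then asks the same, now exhausted, iterator for its `size`, so for non-empty input the mock appears to return 0 rather than the number of rows stored. Tests that care about the count can keep it on the caller side; a sketch with assumed names:

    import adventureworks.person.businessentity.{BusinessentityRepo, BusinessentityRow}
    import java.sql.Connection

    // Count before handing the iterator to the repo, instead of relying on the return value.
    def upsertCounting(repo: BusinessentityRepo, rows: List[BusinessentityRow])(implicit c: Connection): Int = {
      val _ = repo.upsertStreaming(rows.iterator)
      rows.size
    }
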
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[BusinessentityaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala index c1468c00a..b3a0d2531 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import adventureworks.person.address.AddressId import adventureworks.person.addresstype.AddresstypeId import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -146,4 +148,44 @@ class BusinessentityaddressRepoImpl extends BusinessentityaddressRepo { .executeInsert(BusinessentityaddressRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[BusinessentityaddressRow])(implicit c: Connection): List[BusinessentityaddressRow] = { + def toNamedParameter(row: BusinessentityaddressRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("addressid", ParameterValue(row.addressid, null, AddressId.toStatement)), + NamedParameter("addresstypeid", ParameterValue(row.addresstypeid, null, AddresstypeId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + values ({businessentityid}::int4, {addressid}::int4, {addresstypeid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(BusinessentityaddressRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentityaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table businessentityaddress_TEMP (like person.businessentityaddress) on commit drop".execute(): @nowarn + streamingInsert(s"""copy businessentityaddress_TEMP("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentityaddressRow.text, c): @nowarn + SQL"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + select * from businessentityaddress_TEMP + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentityaddress_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala index 7d9a33668..b80a3e4fe 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala @@ -87,4 +87,17 @@ class BusinessentityaddressRepoMock(toRow: Function1[BusinessentityaddressRowUns map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[BusinessentityaddressRow])(implicit c: Connection): List[BusinessentityaddressRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentityaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala index 3d311c871..38734e4d7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala @@ -29,4 +29,7 @@ trait BusinessentitycontactRepo { def update: UpdateBuilder[BusinessentitycontactFields, BusinessentitycontactRow] def update(row: BusinessentitycontactRow)(implicit c: Connection): Boolean def upsert(unsaved: BusinessentitycontactRow)(implicit c: Connection): BusinessentitycontactRow + def upsertBatch(unsaved: Iterable[BusinessentitycontactRow])(implicit c: Connection): List[BusinessentitycontactRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
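Because the streaming upsert's final step is a single `insert ... select ... on conflict do update` from the temporary table, feeding it two rows with the same key makes Postgres abort with "ON CONFLICT DO UPDATE command cannot affect row a second time". Deduplicating by the composite id first avoids that; a sketch, with the function name and the last-one-wins policy as assumptions (and, as always, run inside a transaction):

    import adventureworks.person.businessentityaddress.{BusinessentityaddressRepo, BusinessentityaddressRow}
    import java.sql.Connection

    // Collapse duplicate (businessentityid, addressid, addresstypeid) keys, keeping the last row.
    def streamDistinct(repo: BusinessentityaddressRepo, rows: Iterable[BusinessentityaddressRow])(implicit c: Connection): Int = {
      val distinct = rows.groupMapReduce(_.compositeId)(identity)((_, last) => last).values
      repo.upsertStreaming(distinct.iterator)
    }
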
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[BusinessentitycontactRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala index 0952223e1..8b449c6e8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.person.contacttype.ContacttypeId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -145,4 +147,44 @@ class BusinessentitycontactRepoImpl extends BusinessentitycontactRepo { .executeInsert(BusinessentitycontactRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[BusinessentitycontactRow])(implicit c: Connection): List[BusinessentitycontactRow] = { + def toNamedParameter(row: BusinessentitycontactRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("personid", ParameterValue(row.personid, null, BusinessentityId.toStatement)), + NamedParameter("contacttypeid", ParameterValue(row.contacttypeid, null, ContacttypeId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + values ({businessentityid}::int4, {personid}::int4, {contacttypeid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(BusinessentitycontactRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentitycontactRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table businessentitycontact_TEMP (like person.businessentitycontact) on commit drop".execute(): @nowarn + streamingInsert(s"""copy businessentitycontact_TEMP("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentitycontactRow.text, c): @nowarn + SQL"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + select * from businessentitycontact_TEMP + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentitycontact_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala index ca0c74698..c8611d5d4 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala @@ -87,4 +87,17 @@ class BusinessentitycontactRepoMock(toRow: Function1[BusinessentitycontactRowUns map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[BusinessentitycontactRow])(implicit c: Connection): List[BusinessentitycontactRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentitycontactRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala index 69c219b04..8feeadef9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala @@ -29,4 +29,7 @@ trait ContacttypeRepo { def update: UpdateBuilder[ContacttypeFields, ContacttypeRow] def update(row: ContacttypeRow)(implicit c: Connection): Boolean def upsert(unsaved: ContacttypeRow)(implicit c: Connection): ContacttypeRow + def upsertBatch(unsaved: Iterable[ContacttypeRow])(implicit c: Connection): List[ContacttypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ContacttypeRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala index c416212e5..b60426ac6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala @@ -10,6 +10,7 @@ package contacttype import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -131,4 +133,42 @@ class ContacttypeRepoImpl extends ContacttypeRepo { .executeInsert(ContacttypeRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ContacttypeRow])(implicit c: Connection): List[ContacttypeRow] = { + def toNamedParameter(row: ContacttypeRow): List[NamedParameter] = List( + NamedParameter("contacttypeid", ParameterValue(row.contacttypeid, null, ContacttypeId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + values ({contacttypeid}::int4, {name}::varchar, {modifieddate}::timestamp) + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "contacttypeid", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ContacttypeRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ContacttypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table contacttype_TEMP (like person.contacttype) on commit drop".execute(): @nowarn + streamingInsert(s"""copy contacttype_TEMP("contacttypeid", "name", "modifieddate") from stdin""", batchSize, unsaved)(ContacttypeRow.text, c): @nowarn + SQL"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + select * from contacttype_TEMP + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table contacttype_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala index 165c5bdfa..bc9949089 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala @@ -87,4 +87,17 @@ class ContacttypeRepoMock(toRow: Function1[ContacttypeRowUnsaved, ContacttypeRow map.put(unsaved.contacttypeid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ContacttypeRow])(implicit c: Connection): List[ContacttypeRow] = { + unsaved.map { row => + map += (row.contacttypeid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ContacttypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.contacttypeid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala index 3d2aebce3..42d88560f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala @@ -29,4 +29,7 @@ trait CountryregionRepo { def update: UpdateBuilder[CountryregionFields, CountryregionRow] def update(row: CountryregionRow)(implicit c: Connection): Boolean def upsert(unsaved: CountryregionRow)(implicit c: Connection): CountryregionRow + def upsertBatch(unsaved: Iterable[CountryregionRow])(implicit c: Connection): List[CountryregionRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
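Since `ContacttypeRepoImpl` and `ContacttypeRepoMock` both extend `ContacttypeRepo`, code that needs the new methods can be written once against the trait and exercised with the in-memory mock in tests. A minimal sketch; the function is illustrative, not generated:

    import adventureworks.person.contacttype.{ContacttypeRepo, ContacttypeRow}
    import java.sql.Connection

    // Works unchanged with the live repo and with the map-backed mock.
    def refreshContactTypes(repo: ContacttypeRepo, rows: Iterable[ContacttypeRow])(implicit c: Connection): List[ContacttypeRow] =
      repo.upsertBatch(rows)
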
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CountryregionRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala index 27182db4b..e09001c83 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala @@ -10,6 +10,7 @@ package countryregion import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -128,4 +130,42 @@ class CountryregionRepoImpl extends CountryregionRepo { .executeInsert(CountryregionRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CountryregionRow])(implicit c: Connection): List[CountryregionRow] = { + def toNamedParameter(row: CountryregionRow): List[NamedParameter] = List( + NamedParameter("countryregioncode", ParameterValue(row.countryregioncode, null, CountryregionId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + values ({countryregioncode}, {name}::varchar, {modifieddate}::timestamp) + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "countryregioncode", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CountryregionRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CountryregionRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table countryregion_TEMP (like person.countryregion) on commit drop".execute(): @nowarn + streamingInsert(s"""copy countryregion_TEMP("countryregioncode", "name", "modifieddate") from stdin""", batchSize, unsaved)(CountryregionRow.text, c): @nowarn + SQL"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + select * from countryregion_TEMP + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregion_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala index ac8b61374..0be599f48 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala @@ -87,4 +87,17 @@ class CountryregionRepoMock(toRow: Function1[CountryregionRowUnsaved, Countryreg map.put(unsaved.countryregioncode, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CountryregionRow])(implicit c: Connection): List[CountryregionRow] = { + unsaved.map { row => + map += (row.countryregioncode -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CountryregionRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.countryregioncode -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala index 4c4282b63..1cdd9bf29 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala @@ -29,4 +29,7 @@ trait EmailaddressRepo { def update: UpdateBuilder[EmailaddressFields, EmailaddressRow] def update(row: EmailaddressRow)(implicit c: Connection): Boolean def upsert(unsaved: EmailaddressRow)(implicit c: Connection): EmailaddressRow + def upsertBatch(unsaved: Iterable[EmailaddressRow])(implicit c: Connection): List[EmailaddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
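The two new methods make different trade-offs: `upsertBatch` goes through `BatchSql` with a `returning` clause and hands the persisted rows back, while `upsertStreaming` COPYs through a temporary table and reports only a count. A rough caller-side sketch for choosing between them; the 1000-row threshold and the helper name are arbitrary assumptions, and the streaming branch still needs an explicit transaction:

    import adventureworks.person.countryregion.{CountryregionRepo, CountryregionRow}
    import java.sql.Connection

    def upsertAllRegions(repo: CountryregionRepo, rows: Vector[CountryregionRow])(implicit c: Connection): Int =
      if (rows.size <= 1000) repo.upsertBatch(rows).size   // small load: get the rows back
      else repo.upsertStreaming(rows.iterator)              // large load: COPY path, count only
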
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[EmailaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala index 4a83773fb..d1cf9f628 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -149,4 +151,46 @@ class EmailaddressRepoImpl extends EmailaddressRepo { .executeInsert(EmailaddressRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[EmailaddressRow])(implicit c: Connection): List[EmailaddressRow] = { + def toNamedParameter(row: EmailaddressRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("emailaddressid", ParameterValue(row.emailaddressid, null, ToStatement.intToStatement)), + NamedParameter("emailaddress", ParameterValue(row.emailaddress, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + values ({businessentityid}::int4, {emailaddressid}::int4, {emailaddress}, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(EmailaddressRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
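(the temporary table used below is created with "on commit drop" and filled with COPY before the final upsert, so create, COPY and upsert have to share a single transaction)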
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmailaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table emailaddress_TEMP (like person.emailaddress) on commit drop".execute(): @nowarn + streamingInsert(s"""copy emailaddress_TEMP("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(EmailaddressRow.text, c): @nowarn + SQL"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + select * from emailaddress_TEMP + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table emailaddress_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala index 90f6fa0e6..75b2f9a0b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala @@ -87,4 +87,17 @@ class EmailaddressRepoMock(toRow: Function1[EmailaddressRowUnsaved, Emailaddress map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[EmailaddressRow])(implicit c: Connection): List[EmailaddressRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmailaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala index bedde72dc..23708ec43 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala @@ -30,4 +30,7 @@ trait PasswordRepo { def update: UpdateBuilder[PasswordFields, PasswordRow] def update(row: PasswordRow)(implicit c: Connection): Boolean def upsert(unsaved: PasswordRow)(implicit c: Connection): PasswordRow + def upsertBatch(unsaved: Iterable[PasswordRow])(implicit c: Connection): List[PasswordRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PasswordRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala index e5a1c45df..b7f7bd72b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,48 @@ class PasswordRepoImpl extends PasswordRepo { .executeInsert(PasswordRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PasswordRow])(implicit c: Connection): List[PasswordRow] = { + def toNamedParameter(row: PasswordRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("passwordhash", ParameterValue(row.passwordhash, null, ToStatement.stringToStatement)), + NamedParameter("passwordsalt", ParameterValue(row.passwordsalt, null, ToStatement.stringToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + values ({businessentityid}::int4, {passwordhash}, {passwordsalt}, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PasswordRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
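(under auto-commit every statement commits separately, so the "on commit drop" temporary table created below would vanish before the COPY and the final upsert could use it)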
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PasswordRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table password_TEMP (like person.password) on commit drop".execute(): @nowarn + streamingInsert(s"""copy password_TEMP("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(PasswordRow.text, c): @nowarn + SQL"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + select * from password_TEMP + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table password_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala index 0a5634f39..c4a75b3b1 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala @@ -88,4 +88,17 @@ class PasswordRepoMock(toRow: Function1[PasswordRowUnsaved, PasswordRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PasswordRow])(implicit c: Connection): List[PasswordRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PasswordRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala index 9dbec6653..cb6c61d72 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala @@ -30,4 +30,7 @@ trait PersonRepo { def update: UpdateBuilder[PersonFields, PersonRow] def update(row: PersonRow)(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
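(callers should disable auto-commit on the Connection, or otherwise run this inside an explicit transaction, because the implementation stages the rows in a temporary table that is dropped on commit)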
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala index 940ca5225..391584271 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala @@ -15,6 +15,7 @@ import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Name import adventureworks.public.NameStyle import adventureworks.userdefined.FirstName +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -24,6 +25,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -184,4 +186,72 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("persontype", ParameterValue(row.persontype, null, ToStatement.stringToStatement)), + NamedParameter("namestyle", ParameterValue(row.namestyle, null, NameStyle.toStatement)), + NamedParameter("title", ParameterValue(row.title, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("firstname", ParameterValue(row.firstname, null, /* user-picked */ FirstName.toStatement)), + NamedParameter("middlename", ParameterValue(row.middlename, null, ToStatement.optionToStatement(Name.toStatement, Name.parameterMetadata))), + NamedParameter("lastname", ParameterValue(row.lastname, null, Name.toStatement)), + NamedParameter("suffix", ParameterValue(row.suffix, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("emailpromotion", ParameterValue(row.emailpromotion, null, ToStatement.intToStatement)), + NamedParameter("additionalcontactinfo", ParameterValue(row.additionalcontactinfo, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("demographics", ParameterValue(row.demographics, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + values ({businessentityid}::int4, {persontype}::bpchar, {namestyle}::bool, {title}, {firstname}::varchar, {middlename}::varchar, {lastname}::varchar, {suffix}, {emailpromotion}::int4, 
{additionalcontactinfo}::xml, {demographics}::xml, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like person.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + select * from person_TEMP + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala index ab4d398f2..da4f66c04 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala @@ -88,4 +88,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala index e1ef04026..742f73178 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala @@ -29,4 +29,7 @@ trait PersonphoneRepo { def update: UpdateBuilder[PersonphoneFields, PersonphoneRow] def update(row: PersonphoneRow)(implicit c: Connection): Boolean def upsert(unsaved: PersonphoneRow)(implicit c: Connection): PersonphoneRow + def upsertBatch(unsaved: Iterable[PersonphoneRow])(implicit c: Connection): List[PersonphoneRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonphoneRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala index c2afac90f..aec325a32 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.person.phonenumbertype.PhonenumbertypeId import adventureworks.public.Phone +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -138,4 +140,41 @@ class PersonphoneRepoImpl extends PersonphoneRepo { .executeInsert(PersonphoneRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonphoneRow])(implicit c: Connection): List[PersonphoneRow] = { + def toNamedParameter(row: PersonphoneRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("phonenumber", ParameterValue(row.phonenumber, null, Phone.toStatement)), + NamedParameter("phonenumbertypeid", ParameterValue(row.phonenumbertypeid, null, PhonenumbertypeId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + values ({businessentityid}::int4, {phonenumber}::varchar, {phonenumbertypeid}::int4, {modifieddate}::timestamp) + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", 
"phonenumber", "phonenumbertypeid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonphoneRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonphoneRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table personphone_TEMP (like person.personphone) on commit drop".execute(): @nowarn + streamingInsert(s"""copy personphone_TEMP("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") from stdin""", batchSize, unsaved)(PersonphoneRow.text, c): @nowarn + SQL"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + select * from personphone_TEMP + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personphone_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala index 92853f813..ed12d4df3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala @@ -87,4 +87,17 @@ class PersonphoneRepoMock(toRow: Function1[PersonphoneRowUnsaved, PersonphoneRow map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonphoneRow])(implicit c: Connection): List[PersonphoneRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonphoneRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala index 0be2c2c17..1d49582a9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala @@ -29,4 +29,7 @@ trait PhonenumbertypeRepo { def update: UpdateBuilder[PhonenumbertypeFields, PhonenumbertypeRow] def update(row: PhonenumbertypeRow)(implicit c: Connection): Boolean def upsert(unsaved: PhonenumbertypeRow)(implicit c: Connection): PhonenumbertypeRow + def upsertBatch(unsaved: Iterable[PhonenumbertypeRow])(implicit c: Connection): List[PhonenumbertypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PhonenumbertypeRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala index 065ac42cc..988e4cbd4 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala @@ -10,6 +10,7 @@ package phonenumbertype import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -131,4 +133,42 @@ class PhonenumbertypeRepoImpl extends PhonenumbertypeRepo { .executeInsert(PhonenumbertypeRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PhonenumbertypeRow])(implicit c: Connection): List[PhonenumbertypeRow] = { + def toNamedParameter(row: PhonenumbertypeRow): List[NamedParameter] = List( + NamedParameter("phonenumbertypeid", ParameterValue(row.phonenumbertypeid, null, PhonenumbertypeId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + values ({phonenumbertypeid}::int4, {name}::varchar, {modifieddate}::timestamp) + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "phonenumbertypeid", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PhonenumbertypeRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
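(the create/COPY/upsert sequence below relies on the temporary table surviving until the last statement, which only holds inside a single explicit transaction)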
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PhonenumbertypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table phonenumbertype_TEMP (like person.phonenumbertype) on commit drop".execute(): @nowarn + streamingInsert(s"""copy phonenumbertype_TEMP("phonenumbertypeid", "name", "modifieddate") from stdin""", batchSize, unsaved)(PhonenumbertypeRow.text, c): @nowarn + SQL"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + select * from phonenumbertype_TEMP + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table phonenumbertype_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala index eb0b166c6..3b0853dd1 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala @@ -87,4 +87,17 @@ class PhonenumbertypeRepoMock(toRow: Function1[PhonenumbertypeRowUnsaved, Phonen map.put(unsaved.phonenumbertypeid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PhonenumbertypeRow])(implicit c: Connection): List[PhonenumbertypeRow] = { + unsaved.map { row => + map += (row.phonenumbertypeid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PhonenumbertypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.phonenumbertypeid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala index 279c3c91a..b9d836a6e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala @@ -29,4 +29,7 @@ trait StateprovinceRepo { def update: UpdateBuilder[StateprovinceFields, StateprovinceRow] def update(row: StateprovinceRow)(implicit c: Connection): Boolean def upsert(unsaved: StateprovinceRow)(implicit c: Connection): StateprovinceRow + def upsertBatch(unsaved: Iterable[StateprovinceRow])(implicit c: Connection): List[StateprovinceRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[StateprovinceRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala index 228f4abf1..ac856af52 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.person.countryregion.CountryregionId import adventureworks.public.Flag import adventureworks.public.Name import adventureworks.sales.salesterritory.SalesterritoryId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -22,6 +23,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -162,4 +164,57 @@ class StateprovinceRepoImpl extends StateprovinceRepo { .executeInsert(StateprovinceRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[StateprovinceRow])(implicit c: Connection): List[StateprovinceRow] = { + def toNamedParameter(row: StateprovinceRow): List[NamedParameter] = List( + NamedParameter("stateprovinceid", ParameterValue(row.stateprovinceid, null, StateprovinceId.toStatement)), + NamedParameter("stateprovincecode", ParameterValue(row.stateprovincecode, null, ToStatement.stringToStatement)), + NamedParameter("countryregioncode", ParameterValue(row.countryregioncode, null, CountryregionId.toStatement)), + NamedParameter("isonlystateprovinceflag", ParameterValue(row.isonlystateprovinceflag, null, Flag.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, SalesterritoryId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + values ({stateprovinceid}::int4, {stateprovincecode}::bpchar, {countryregioncode}, {isonlystateprovinceflag}::bool, {name}::varchar, {territoryid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(StateprovinceRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you 
use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[StateprovinceRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table stateprovince_TEMP (like person.stateprovince) on commit drop".execute(): @nowarn + streamingInsert(s"""copy stateprovince_TEMP("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(StateprovinceRow.text, c): @nowarn + SQL"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + select * from stateprovince_TEMP + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table stateprovince_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala index d7b8f962a..ad2e4cb12 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala @@ -87,4 +87,17 @@ class StateprovinceRepoMock(toRow: Function1[StateprovinceRowUnsaved, Stateprovi map.put(unsaved.stateprovinceid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[StateprovinceRow])(implicit c: Connection): List[StateprovinceRow] = { + unsaved.map { row => + map += (row.stateprovinceid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[StateprovinceRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.stateprovinceid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala index 560fea6a7..e7d39a783 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala @@ -29,4 +29,7 @@ trait BillofmaterialsRepo { def update: UpdateBuilder[BillofmaterialsFields, BillofmaterialsRow] def update(row: BillofmaterialsRow)(implicit c: Connection): Boolean def upsert(unsaved: BillofmaterialsRow)(implicit c: Connection): BillofmaterialsRow + def upsertBatch(unsaved: Iterable[BillofmaterialsRow])(implicit c: Connection): List[BillofmaterialsRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[BillofmaterialsRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala index a8c84e1f5..7ab3b068c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.product.ProductId import adventureworks.production.unitmeasure.UnitmeasureId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -164,4 +166,60 @@ class BillofmaterialsRepoImpl extends BillofmaterialsRepo { .executeInsert(BillofmaterialsRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[BillofmaterialsRow])(implicit c: Connection): List[BillofmaterialsRow] = { + def toNamedParameter(row: BillofmaterialsRow): List[NamedParameter] = List( + NamedParameter("billofmaterialsid", ParameterValue(row.billofmaterialsid, null, ToStatement.intToStatement)), + NamedParameter("productassemblyid", ParameterValue(row.productassemblyid, null, ToStatement.optionToStatement(ProductId.toStatement, ProductId.parameterMetadata))), + NamedParameter("componentid", ParameterValue(row.componentid, null, ProductId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("unitmeasurecode", ParameterValue(row.unitmeasurecode, null, UnitmeasureId.toStatement)), + NamedParameter("bomlevel", ParameterValue(row.bomlevel, null, TypoShort.toStatement)), + NamedParameter("perassemblyqty", ParameterValue(row.perassemblyqty, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + values ({billofmaterialsid}::int4, {productassemblyid}::int4, {componentid}::int4, {startdate}::timestamp, {enddate}::timestamp, {unitmeasurecode}::bpchar, {bomlevel}::int2, {perassemblyqty}::numeric, {modifieddate}::timestamp) + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + 
"modifieddate" = EXCLUDED."modifieddate" + returning "billofmaterialsid", "productassemblyid", "componentid", "startdate"::text, "enddate"::text, "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(BillofmaterialsRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BillofmaterialsRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table billofmaterials_TEMP (like production.billofmaterials) on commit drop".execute(): @nowarn + streamingInsert(s"""copy billofmaterials_TEMP("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") from stdin""", batchSize, unsaved)(BillofmaterialsRow.text, c): @nowarn + SQL"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + select * from billofmaterials_TEMP + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table billofmaterials_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala index 397e97f21..a50cef175 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala @@ -87,4 +87,17 @@ class BillofmaterialsRepoMock(toRow: Function1[BillofmaterialsRowUnsaved, Billof map.put(unsaved.billofmaterialsid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[BillofmaterialsRow])(implicit c: Connection): List[BillofmaterialsRow] = { + unsaved.map { row => + map += (row.billofmaterialsid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BillofmaterialsRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.billofmaterialsid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala index f1c7fcff1..cd42c61ca 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala @@ -29,4 +29,7 @@ trait CultureRepo { def update: UpdateBuilder[CultureFields, CultureRow] def update(row: CultureRow)(implicit c: Connection): Boolean def upsert(unsaved: CultureRow)(implicit c: Connection): CultureRow + def upsertBatch(unsaved: Iterable[CultureRow])(implicit c: Connection): List[CultureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CultureRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala index 168ccdcdc..d414001c5 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala @@ -10,6 +10,7 @@ package culture import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -128,4 +130,42 @@ class CultureRepoImpl extends CultureRepo { .executeInsert(CultureRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CultureRow])(implicit c: Connection): List[CultureRow] = { + def toNamedParameter(row: CultureRow): List[NamedParameter] = List( + NamedParameter("cultureid", ParameterValue(row.cultureid, null, CultureId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.culture("cultureid", "name", "modifieddate") + values ({cultureid}::bpchar, {name}::varchar, {modifieddate}::timestamp) + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "cultureid", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CultureRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
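(with auto-commit enabled the "on commit drop" temporary table created below would be gone before the COPY and the upsert run)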
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CultureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table culture_TEMP (like production.culture) on commit drop".execute(): @nowarn + streamingInsert(s"""copy culture_TEMP("cultureid", "name", "modifieddate") from stdin""", batchSize, unsaved)(CultureRow.text, c): @nowarn + SQL"""insert into production.culture("cultureid", "name", "modifieddate") + select * from culture_TEMP + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table culture_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala index 79c532d76..8e36e29cb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala @@ -87,4 +87,17 @@ class CultureRepoMock(toRow: Function1[CultureRowUnsaved, CultureRow], map.put(unsaved.cultureid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CultureRow])(implicit c: Connection): List[CultureRow] = { + unsaved.map { row => + map += (row.cultureid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CultureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.cultureid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala index e11b65000..95154b1b2 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala @@ -31,4 +31,7 @@ trait DocumentRepo { def update: UpdateBuilder[DocumentFields, DocumentRow] def update(row: DocumentRow)(implicit c: Connection): Boolean def upsert(unsaved: DocumentRow)(implicit c: Connection): DocumentRow + def upsertBatch(unsaved: Iterable[DocumentRow])(implicit c: Connection): List[DocumentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
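(run this with auto-commit disabled and commit afterwards; the implementation stages rows in a temporary table that is dropped on commit)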
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[DocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala index 130dfb5a0..3a832c4c6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Flag +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -23,6 +24,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -193,4 +195,72 @@ class DocumentRepoImpl extends DocumentRepo { .executeInsert(DocumentRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[DocumentRow])(implicit c: Connection): List[DocumentRow] = { + def toNamedParameter(row: DocumentRow): List[NamedParameter] = List( + NamedParameter("title", ParameterValue(row.title, null, ToStatement.stringToStatement)), + NamedParameter("owner", ParameterValue(row.owner, null, BusinessentityId.toStatement)), + NamedParameter("folderflag", ParameterValue(row.folderflag, null, Flag.toStatement)), + NamedParameter("filename", ParameterValue(row.filename, null, ToStatement.stringToStatement)), + NamedParameter("fileextension", ParameterValue(row.fileextension, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("revision", ParameterValue(row.revision, null, ToStatement.stringToStatement)), + NamedParameter("changenumber", ParameterValue(row.changenumber, null, ToStatement.intToStatement)), + NamedParameter("status", ParameterValue(row.status, null, TypoShort.toStatement)), + NamedParameter("documentsummary", ParameterValue(row.documentsummary, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("document", ParameterValue(row.document, null, ToStatement.optionToStatement(TypoBytea.toStatement, TypoBytea.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("documentnode", ParameterValue(row.documentnode, null, DocumentId.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + values ({title}, {owner}::int4, {folderflag}::bool, {filename}, {fileextension}, {revision}::bpchar, {changenumber}::int4, {status}::int2, {documentsummary}, {document}::bytea, {rowguid}::uuid, {modifieddate}::timestamp, {documentnode}) + on conflict ("documentnode") + do update set + 
"title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate"::text, "documentnode" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(DocumentRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[DocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table document_TEMP (like production.document) on commit drop".execute(): @nowarn + streamingInsert(s"""copy document_TEMP("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") from stdin""", batchSize, unsaved)(DocumentRow.text, c): @nowarn + SQL"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + select * from document_TEMP + on conflict ("documentnode") + do update set + "title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table document_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala index 9f4d2ff64..ee6057ff8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala @@ -91,4 +91,17 @@ class DocumentRepoMock(toRow: Function1[DocumentRowUnsaved, DocumentRow], map.put(unsaved.documentnode, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[DocumentRow])(implicit c: Connection): List[DocumentRow] = { + unsaved.map { row => + map += (row.documentnode -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[DocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.documentnode -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala index 6c0210c76..9f4506d3c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala @@ -29,4 +29,7 @@ trait IllustrationRepo { def update: UpdateBuilder[IllustrationFields, IllustrationRow] def update(row: IllustrationRow)(implicit c: Connection): Boolean def upsert(unsaved: IllustrationRow)(implicit c: Connection): IllustrationRow + def upsertBatch(unsaved: Iterable[IllustrationRow])(implicit c: Connection): List[IllustrationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[IllustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala index e366c777a..1808e94d9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala @@ -10,6 +10,7 @@ package illustration import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoXml +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -132,4 +134,42 @@ class IllustrationRepoImpl extends IllustrationRepo { .executeInsert(IllustrationRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[IllustrationRow])(implicit c: Connection): List[IllustrationRow] = { + def toNamedParameter(row: IllustrationRow): List[NamedParameter] = List( + NamedParameter("illustrationid", ParameterValue(row.illustrationid, null, IllustrationId.toStatement)), + NamedParameter("diagram", ParameterValue(row.diagram, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + values ({illustrationid}::int4, {diagram}::xml, {modifieddate}::timestamp) + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + returning "illustrationid", "diagram", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + 
).executeReturning(IllustrationRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[IllustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table illustration_TEMP (like production.illustration) on commit drop".execute(): @nowarn + streamingInsert(s"""copy illustration_TEMP("illustrationid", "diagram", "modifieddate") from stdin""", batchSize, unsaved)(IllustrationRow.text, c): @nowarn + SQL"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + select * from illustration_TEMP + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table illustration_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala index 3ecbea1fa..6767884d6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala @@ -87,4 +87,17 @@ class IllustrationRepoMock(toRow: Function1[IllustrationRowUnsaved, Illustration map.put(unsaved.illustrationid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[IllustrationRow])(implicit c: Connection): List[IllustrationRow] = { + unsaved.map { row => + map += (row.illustrationid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[IllustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.illustrationid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala index 5a1437580..0a5620479 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala @@ -29,4 +29,7 @@ trait LocationRepo { def update: UpdateBuilder[LocationFields, LocationRow] def update(row: LocationRow)(implicit c: Connection): Boolean def upsert(unsaved: LocationRow)(implicit c: Connection): LocationRow + def upsertBatch(unsaved: Iterable[LocationRow])(implicit c: Connection): List[LocationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[LocationRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala index 431455ca8..b5662c828 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala @@ -10,6 +10,7 @@ package location import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -146,4 +148,48 @@ class LocationRepoImpl extends LocationRepo { .executeInsert(LocationRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[LocationRow])(implicit c: Connection): List[LocationRow] = { + def toNamedParameter(row: LocationRow): List[NamedParameter] = List( + NamedParameter("locationid", ParameterValue(row.locationid, null, LocationId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("costrate", ParameterValue(row.costrate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("availability", ParameterValue(row.availability, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + values ({locationid}::int4, {name}::varchar, {costrate}::numeric, {availability}::numeric, {modifieddate}::timestamp) + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + returning "locationid", "name", "costrate", "availability", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(LocationRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[LocationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table location_TEMP (like production.location) on commit drop".execute(): @nowarn + streamingInsert(s"""copy location_TEMP("locationid", "name", "costrate", "availability", "modifieddate") from stdin""", batchSize, unsaved)(LocationRow.text, c): @nowarn + SQL"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + select * from location_TEMP + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table location_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala index da374be2c..b0e7d5c87 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala @@ -87,4 +87,17 @@ class LocationRepoMock(toRow: Function1[LocationRowUnsaved, LocationRow], map.put(unsaved.locationid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[LocationRow])(implicit c: Connection): List[LocationRow] = { + unsaved.map { row => + map += (row.locationid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[LocationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.locationid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala index e077e4b2d..ea99bb45f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala @@ -29,4 +29,7 @@ trait ProductRepo { def update: UpdateBuilder[ProductFields, ProductRow] def update(row: ProductRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductRow)(implicit c: Connection): ProductRow + def upsertBatch(unsaved: Iterable[ProductRow])(implicit c: Connection): List[ProductRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala index 12baefb38..40f07e7eb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala @@ -16,6 +16,7 @@ import adventureworks.production.productsubcategory.ProductsubcategoryId import adventureworks.production.unitmeasure.UnitmeasureId import adventureworks.public.Flag import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -25,6 +26,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -236,4 +238,108 @@ class ProductRepoImpl extends ProductRepo { .executeInsert(ProductRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductRow])(implicit c: Connection): List[ProductRow] = { + def toNamedParameter(row: ProductRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("productnumber", ParameterValue(row.productnumber, null, ToStatement.stringToStatement)), + NamedParameter("makeflag", ParameterValue(row.makeflag, null, Flag.toStatement)), + NamedParameter("finishedgoodsflag", ParameterValue(row.finishedgoodsflag, null, Flag.toStatement)), + NamedParameter("color", ParameterValue(row.color, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("safetystocklevel", ParameterValue(row.safetystocklevel, null, TypoShort.toStatement)), + NamedParameter("reorderpoint", ParameterValue(row.reorderpoint, null, TypoShort.toStatement)), + NamedParameter("standardcost", ParameterValue(row.standardcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("listprice", ParameterValue(row.listprice, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("size", ParameterValue(row.size, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("sizeunitmeasurecode", ParameterValue(row.sizeunitmeasurecode, null, ToStatement.optionToStatement(UnitmeasureId.toStatement, UnitmeasureId.parameterMetadata))), + NamedParameter("weightunitmeasurecode", ParameterValue(row.weightunitmeasurecode, null, ToStatement.optionToStatement(UnitmeasureId.toStatement, UnitmeasureId.parameterMetadata))), + NamedParameter("weight", ParameterValue(row.weight, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("daystomanufacture", ParameterValue(row.daystomanufacture, null, ToStatement.intToStatement)), + NamedParameter("productline", ParameterValue(row.productline, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("class", 
ParameterValue(row.`class`, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("style", ParameterValue(row.style, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("productsubcategoryid", ParameterValue(row.productsubcategoryid, null, ToStatement.optionToStatement(ProductsubcategoryId.toStatement, ProductsubcategoryId.parameterMetadata))), + NamedParameter("productmodelid", ParameterValue(row.productmodelid, null, ToStatement.optionToStatement(ProductmodelId.toStatement, ProductmodelId.parameterMetadata))), + NamedParameter("sellstartdate", ParameterValue(row.sellstartdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("sellenddate", ParameterValue(row.sellenddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("discontinueddate", ParameterValue(row.discontinueddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") + values ({productid}::int4, {name}::varchar, {productnumber}, {makeflag}::bool, {finishedgoodsflag}::bool, {color}, {safetystocklevel}::int2, {reorderpoint}::int2, {standardcost}::numeric, {listprice}::numeric, {size}, {sizeunitmeasurecode}::bpchar, {weightunitmeasurecode}::bpchar, {weight}::numeric, {daystomanufacture}::int4, {productline}::bpchar, {class}::bpchar, {style}::bpchar, {productsubcategoryid}::int4, {productmodelid}::int4, {sellstartdate}::timestamp, {sellenddate}::timestamp, {discontinueddate}::timestamp, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", 
"name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate"::text, "sellenddate"::text, "discontinueddate"::text, "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table product_TEMP (like production.product) on commit drop".execute(): @nowarn + streamingInsert(s"""copy product_TEMP("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductRow.text, c): @nowarn + SQL"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") + select * from product_TEMP + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table product_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala index dd88783d4..b2c06c5cb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala @@ -87,4 +87,17 @@ class ProductRepoMock(toRow: Function1[ProductRowUnsaved, ProductRow], map.put(unsaved.productid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: 
Iterable[ProductRow])(implicit c: Connection): List[ProductRow] = { + unsaved.map { row => + map += (row.productid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala index 3715886c0..b9ec06ec3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala @@ -29,4 +29,7 @@ trait ProductcategoryRepo { def update: UpdateBuilder[ProductcategoryFields, ProductcategoryRow] def update(row: ProductcategoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductcategoryRow)(implicit c: Connection): ProductcategoryRow + def upsertBatch(unsaved: Iterable[ProductcategoryRow])(implicit c: Connection): List[ProductcategoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala index 95c88ef90..8a05b1550 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -139,4 +141,45 @@ class ProductcategoryRepoImpl extends ProductcategoryRepo { .executeInsert(ProductcategoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductcategoryRow])(implicit c: Connection): List[ProductcategoryRow] = { + def toNamedParameter(row: ProductcategoryRow): List[NamedParameter] = List( + NamedParameter("productcategoryid", ParameterValue(row.productcategoryid, null, ProductcategoryId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productcategory("productcategoryid", "name", "rowguid", 
"modifieddate") + values ({productcategoryid}::int4, {name}::varchar, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productcategoryid", "name", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductcategoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productcategory_TEMP (like production.productcategory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productcategory_TEMP("productcategoryid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductcategoryRow.text, c): @nowarn + SQL"""insert into production.productcategory("productcategoryid", "name", "rowguid", "modifieddate") + select * from productcategory_TEMP + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcategory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala index 98bf69181..6b343b445 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductcategoryRepoMock(toRow: Function1[ProductcategoryRowUnsaved, Produc map.put(unsaved.productcategoryid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductcategoryRow])(implicit c: Connection): List[ProductcategoryRow] = { + unsaved.map { row => + map += (row.productcategoryid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productcategoryid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala index b1a1d6a0f..96d1b517e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala @@ -29,4 +29,7 @@ trait ProductcosthistoryRepo { def update: UpdateBuilder[ProductcosthistoryFields, ProductcosthistoryRow] def update(row: ProductcosthistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductcosthistoryRow)(implicit c: Connection): ProductcosthistoryRow + def upsertBatch(unsaved: Iterable[ProductcosthistoryRow])(implicit c: Connection): List[ProductcosthistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductcosthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala index 3d06d52ce..29759da23 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala @@ -10,6 +10,7 @@ package productcosthistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,46 @@ class ProductcosthistoryRepoImpl extends ProductcosthistoryRepo { .executeInsert(ProductcosthistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductcosthistoryRow])(implicit c: Connection): List[ProductcosthistoryRow] = { + def toNamedParameter(row: ProductcosthistoryRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("standardcost", ParameterValue(row.standardcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + 
s"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + values ({productid}::int4, {startdate}::timestamp, {enddate}::timestamp, {standardcost}::numeric, {modifieddate}::timestamp) + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "startdate"::text, "enddate"::text, "standardcost", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductcosthistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductcosthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productcosthistory_TEMP (like production.productcosthistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productcosthistory_TEMP("productid", "startdate", "enddate", "standardcost", "modifieddate") from stdin""", batchSize, unsaved)(ProductcosthistoryRow.text, c): @nowarn + SQL"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + select * from productcosthistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcosthistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala index 4f92133d1..36a348be4 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductcosthistoryRepoMock(toRow: Function1[ProductcosthistoryRowUnsaved, map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductcosthistoryRow])(implicit c: Connection): List[ProductcosthistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductcosthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala index 56c66e170..9cb988c8b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala @@ -29,4 +29,7 @@ trait ProductdescriptionRepo { def update: UpdateBuilder[ProductdescriptionFields, ProductdescriptionRow] def update(row: ProductdescriptionRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductdescriptionRow)(implicit c: Connection): ProductdescriptionRow + def upsertBatch(unsaved: Iterable[ProductdescriptionRow])(implicit c: Connection): List[ProductdescriptionRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductdescriptionRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala index 42190ac7a..a4b595c15 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala @@ -10,6 +10,7 @@ package productdescription import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -139,4 +141,45 @@ class ProductdescriptionRepoImpl extends ProductdescriptionRepo { .executeInsert(ProductdescriptionRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductdescriptionRow])(implicit c: Connection): List[ProductdescriptionRow] = { + def toNamedParameter(row: ProductdescriptionRow): List[NamedParameter] = List( + NamedParameter("productdescriptionid", ParameterValue(row.productdescriptionid, null, ProductdescriptionId.toStatement)), + NamedParameter("description", ParameterValue(row.description, null, ToStatement.stringToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + values ({productdescriptionid}::int4, {description}, 
{rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productdescriptionid", "description", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductdescriptionRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductdescriptionRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productdescription_TEMP (like production.productdescription) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productdescription_TEMP("productdescriptionid", "description", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductdescriptionRow.text, c): @nowarn + SQL"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + select * from productdescription_TEMP + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdescription_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala index 80c10400c..cc7cd1ad0 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala @@ -87,4 +87,17 @@ class ProductdescriptionRepoMock(toRow: Function1[ProductdescriptionRowUnsaved, map.put(unsaved.productdescriptionid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductdescriptionRow])(implicit c: Connection): List[ProductdescriptionRow] = { + unsaved.map { row => + map += (row.productdescriptionid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductdescriptionRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productdescriptionid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala index 2a78e5277..198e291d8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala @@ -29,4 +29,7 @@ trait ProductdocumentRepo { def update: UpdateBuilder[ProductdocumentFields, ProductdocumentRow] def update(row: ProductdocumentRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductdocumentRow)(implicit c: Connection): ProductdocumentRow + def upsertBatch(unsaved: Iterable[ProductdocumentRow])(implicit c: Connection): List[ProductdocumentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductdocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala index 28a794ad1..f3c228a8a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.document.DocumentId import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -136,4 +138,40 @@ class ProductdocumentRepoImpl extends ProductdocumentRepo { .executeInsert(ProductdocumentRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductdocumentRow])(implicit c: Connection): List[ProductdocumentRow] = { + def toNamedParameter(row: ProductdocumentRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("documentnode", ParameterValue(row.documentnode, null, DocumentId.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productdocument("productid", "modifieddate", "documentnode") + values ({productid}::int4, {modifieddate}::timestamp, {documentnode}) + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "modifieddate"::text, "documentnode" + """, + 
toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductdocumentRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductdocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productdocument_TEMP (like production.productdocument) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productdocument_TEMP("productid", "modifieddate", "documentnode") from stdin""", batchSize, unsaved)(ProductdocumentRow.text, c): @nowarn + SQL"""insert into production.productdocument("productid", "modifieddate", "documentnode") + select * from productdocument_TEMP + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdocument_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala index d7b8cd12b..08229c907 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala @@ -87,4 +87,17 @@ class ProductdocumentRepoMock(toRow: Function1[ProductdocumentRowUnsaved, Produc map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductdocumentRow])(implicit c: Connection): List[ProductdocumentRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductdocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala index ed3a87eb9..32b1602db 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala @@ -29,4 +29,7 @@ trait ProductinventoryRepo { def update: UpdateBuilder[ProductinventoryFields, ProductinventoryRow] def update(row: ProductinventoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductinventoryRow)(implicit c: Connection): ProductinventoryRow + def upsertBatch(unsaved: Iterable[ProductinventoryRow])(implicit c: Connection): List[ProductinventoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
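Because the generated upsertBatch ends in a RETURNING list that is parsed back through rowParser, the caller receives the rows exactly as the database stored them. A minimal sketch; the helper name and the choice to key results by compositeId are illustrative:

    import java.sql.Connection
    import adventureworks.production.productdocument.{ProductdocumentRepoImpl, ProductdocumentRow}

    /* Sketch only: upsert a batch and index what was actually persisted by its composite key. */
    def syncProductDocuments(rows: Iterable[ProductdocumentRow])(implicit c: Connection) = {
      val repo = new ProductdocumentRepoImpl
      val persisted: List[ProductdocumentRow] = repo.upsertBatch(rows)
      persisted.iterator.map(row => row.compositeId -> row).toMap
    }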
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductinventoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala index fd0dc5d4e..bc6576b60 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.production.location.LocationId import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -158,4 +160,52 @@ class ProductinventoryRepoImpl extends ProductinventoryRepo { .executeInsert(ProductinventoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductinventoryRow])(implicit c: Connection): List[ProductinventoryRow] = { + def toNamedParameter(row: ProductinventoryRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("locationid", ParameterValue(row.locationid, null, LocationId.toStatement)), + NamedParameter("shelf", ParameterValue(row.shelf, null, ToStatement.stringToStatement)), + NamedParameter("bin", ParameterValue(row.bin, null, TypoShort.toStatement)), + NamedParameter("quantity", ParameterValue(row.quantity, null, TypoShort.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + values ({productid}::int4, {locationid}::int2, {shelf}, {bin}::int2, {quantity}::int2, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductinventoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductinventoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productinventory_TEMP (like production.productinventory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productinventory_TEMP("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductinventoryRow.text, c): @nowarn + SQL"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + select * from productinventory_TEMP + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productinventory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala index 0039022c3..75c940d7a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductinventoryRepoMock(toRow: Function1[ProductinventoryRowUnsaved, Prod map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductinventoryRow])(implicit c: Connection): List[ProductinventoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductinventoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala index f2af79010..f7d65f998 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala @@ -29,4 +29,7 @@ trait ProductlistpricehistoryRepo { def update: UpdateBuilder[ProductlistpricehistoryFields, ProductlistpricehistoryRow] def update(row: ProductlistpricehistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductlistpricehistoryRow)(implicit c: Connection): ProductlistpricehistoryRow + def upsertBatch(unsaved: Iterable[ProductlistpricehistoryRow])(implicit c: Connection): List[ProductlistpricehistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
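The *RepoMock classes shadow the database with an in-memory map keyed by the same identifier as the table, so code written against the ProductinventoryRepo trait can be exercised with either the real implementation or the mock. One illustrative property such code might assert (the helper below is an assumed test utility, not generated code):

    import java.sql.Connection
    import adventureworks.production.productinventory.{ProductinventoryRepo, ProductinventoryRow}

    /* Sketch only: upserting the same rows twice should return the same persisted values. */
    def upsertTwiceIsIdempotent(repo: ProductinventoryRepo, rows: List[ProductinventoryRow])(implicit c: Connection): Boolean = {
      val first  = repo.upsertBatch(rows)
      val second = repo.upsertBatch(rows)
      first == second
    }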
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductlistpricehistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala index 22559619f..0ae88601e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala @@ -10,6 +10,7 @@ package productlistpricehistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,46 @@ class ProductlistpricehistoryRepoImpl extends ProductlistpricehistoryRepo { .executeInsert(ProductlistpricehistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductlistpricehistoryRow])(implicit c: Connection): List[ProductlistpricehistoryRow] = { + def toNamedParameter(row: ProductlistpricehistoryRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("listprice", ParameterValue(row.listprice, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + values ({productid}::int4, {startdate}::timestamp, {enddate}::timestamp, {listprice}::numeric, {modifieddate}::timestamp) + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "startdate"::text, "enddate"::text, "listprice", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductlistpricehistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductlistpricehistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productlistpricehistory_TEMP (like production.productlistpricehistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productlistpricehistory_TEMP("productid", "startdate", "enddate", "listprice", "modifieddate") from stdin""", batchSize, unsaved)(ProductlistpricehistoryRow.text, c): @nowarn + SQL"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + select * from productlistpricehistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productlistpricehistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala index 36b13e37a..4e6472f29 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductlistpricehistoryRepoMock(toRow: Function1[ProductlistpricehistoryRo map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductlistpricehistoryRow])(implicit c: Connection): List[ProductlistpricehistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductlistpricehistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala index 091ac06c9..97e574513 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala @@ -29,4 +29,7 @@ trait ProductmodelRepo { def update: UpdateBuilder[ProductmodelFields, ProductmodelRow] def update(row: ProductmodelRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductmodelRow)(implicit c: Connection): ProductmodelRow + def upsertBatch(unsaved: Iterable[ProductmodelRow])(implicit c: Connection): List[ProductmodelRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductmodelRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala index 965bdf1e3..951891e45 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.customtypes.TypoXml import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -149,4 +151,51 @@ class ProductmodelRepoImpl extends ProductmodelRepo { .executeInsert(ProductmodelRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductmodelRow])(implicit c: Connection): List[ProductmodelRow] = { + def toNamedParameter(row: ProductmodelRow): List[NamedParameter] = List( + NamedParameter("productmodelid", ParameterValue(row.productmodelid, null, ProductmodelId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("catalogdescription", ParameterValue(row.catalogdescription, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("instructions", ParameterValue(row.instructions, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + values ({productmodelid}::int4, {name}::varchar, {catalogdescription}::xml, {instructions}::xml, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductmodelRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productmodel_TEMP (like production.productmodel) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productmodel_TEMP("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelRow.text, c): @nowarn + SQL"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + select * from productmodel_TEMP + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodel_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala index 138aaf1bd..2ac224d6a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala @@ -87,4 +87,17 @@ class ProductmodelRepoMock(toRow: Function1[ProductmodelRowUnsaved, Productmodel map.put(unsaved.productmodelid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductmodelRow])(implicit c: Connection): List[ProductmodelRow] = { + unsaved.map { row => + map += (row.productmodelid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productmodelid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala index 31b7f1969..b889f2672 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala @@ -29,4 +29,7 @@ trait ProductmodelillustrationRepo { def update: UpdateBuilder[ProductmodelillustrationFields, ProductmodelillustrationRow] def update(row: ProductmodelillustrationRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductmodelillustrationRow)(implicit c: Connection): ProductmodelillustrationRow + def upsertBatch(unsaved: Iterable[ProductmodelillustrationRow])(implicit c: Connection): List[ProductmodelillustrationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductmodelillustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala index 41df2e3e8..294caa715 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.illustration.IllustrationId import adventureworks.production.productmodel.ProductmodelId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -133,4 +135,40 @@ class ProductmodelillustrationRepoImpl extends ProductmodelillustrationRepo { .executeInsert(ProductmodelillustrationRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductmodelillustrationRow])(implicit c: Connection): List[ProductmodelillustrationRow] = { + def toNamedParameter(row: ProductmodelillustrationRow): List[NamedParameter] = List( + NamedParameter("productmodelid", ParameterValue(row.productmodelid, null, ProductmodelId.toStatement)), + NamedParameter("illustrationid", ParameterValue(row.illustrationid, null, IllustrationId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + values ({productmodelid}::int4, {illustrationid}::int4, {modifieddate}::timestamp) + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "illustrationid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductmodelillustrationRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelillustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productmodelillustration_TEMP (like production.productmodelillustration) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productmodelillustration_TEMP("productmodelid", "illustrationid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelillustrationRow.text, c): @nowarn + SQL"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + select * from productmodelillustration_TEMP + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelillustration_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala index 903ada02a..ae362a4e9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala @@ -87,4 +87,17 @@ class ProductmodelillustrationRepoMock(toRow: Function1[Productmodelillustration map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductmodelillustrationRow])(implicit c: Connection): List[ProductmodelillustrationRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelillustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala index cbc87422c..a6e0f537f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala @@ -29,4 +29,7 @@ trait ProductmodelproductdescriptioncultureRepo { def update: UpdateBuilder[ProductmodelproductdescriptioncultureFields, ProductmodelproductdescriptioncultureRow] def update(row: ProductmodelproductdescriptioncultureRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductmodelproductdescriptioncultureRow)(implicit c: Connection): ProductmodelproductdescriptioncultureRow + def upsertBatch(unsaved: Iterable[ProductmodelproductdescriptioncultureRow])(implicit c: Connection): List[ProductmodelproductdescriptioncultureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala index 366b82cff..ec82f9f96 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.culture.CultureId import adventureworks.production.productdescription.ProductdescriptionId import adventureworks.production.productmodel.ProductmodelId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -138,4 +140,41 @@ class ProductmodelproductdescriptioncultureRepoImpl extends Productmodelproductd .executeInsert(ProductmodelproductdescriptioncultureRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductmodelproductdescriptioncultureRow])(implicit c: Connection): List[ProductmodelproductdescriptioncultureRow] = { + def toNamedParameter(row: ProductmodelproductdescriptioncultureRow): List[NamedParameter] = List( + NamedParameter("productmodelid", ParameterValue(row.productmodelid, null, ProductmodelId.toStatement)), + NamedParameter("productdescriptionid", ParameterValue(row.productdescriptionid, null, ProductdescriptionId.toStatement)), + NamedParameter("cultureid", ParameterValue(row.cultureid, null, CultureId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + values ({productmodelid}::int4, {productdescriptionid}::int4, {cultureid}::bpchar, {modifieddate}::timestamp) + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "productdescriptionid", "cultureid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductmodelproductdescriptioncultureRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productmodelproductdescriptionculture_TEMP (like production.productmodelproductdescriptionculture) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productmodelproductdescriptionculture_TEMP("productmodelid", "productdescriptionid", "cultureid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelproductdescriptioncultureRow.text, c): @nowarn + SQL"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + select * from productmodelproductdescriptionculture_TEMP + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelproductdescriptionculture_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala index be236f953..ecb8b8245 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala @@ -87,4 +87,17 @@ class ProductmodelproductdescriptioncultureRepoMock(toRow: Function1[Productmode map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductmodelproductdescriptioncultureRow])(implicit c: Connection): List[ProductmodelproductdescriptioncultureRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala index e73a0c312..4b8824674 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala @@ -29,4 +29,7 @@ trait ProductphotoRepo { def update: UpdateBuilder[ProductphotoFields, ProductphotoRow] def update(row: ProductphotoRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductphotoRow)(implicit c: Connection): ProductphotoRow + def upsertBatch(unsaved: Iterable[ProductphotoRow])(implicit c: Connection): List[ProductphotoRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala index c67fe329f..6ec60ea74 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala @@ -10,6 +10,7 @@ package productphoto import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoBytea import adventureworks.customtypes.TypoLocalDateTime +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -145,4 +147,51 @@ class ProductphotoRepoImpl extends ProductphotoRepo { .executeInsert(ProductphotoRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductphotoRow])(implicit c: Connection): List[ProductphotoRow] = { + def toNamedParameter(row: ProductphotoRow): List[NamedParameter] = List( + NamedParameter("productphotoid", ParameterValue(row.productphotoid, null, ProductphotoId.toStatement)), + NamedParameter("thumbnailphoto", ParameterValue(row.thumbnailphoto, null, ToStatement.optionToStatement(TypoBytea.toStatement, TypoBytea.parameterMetadata))), + NamedParameter("thumbnailphotofilename", ParameterValue(row.thumbnailphotofilename, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("largephoto", ParameterValue(row.largephoto, null, ToStatement.optionToStatement(TypoBytea.toStatement, TypoBytea.parameterMetadata))), + NamedParameter("largephotofilename", ParameterValue(row.largephotofilename, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + values ({productphotoid}::int4, {thumbnailphoto}::bytea, {thumbnailphotofilename}, {largephoto}::bytea, {largephotofilename}, {modifieddate}::timestamp) + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + returning "productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductphotoRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productphoto_TEMP (like production.productphoto) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productphoto_TEMP("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") from stdin""", batchSize, unsaved)(ProductphotoRow.text, c): @nowarn + SQL"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + select * from productphoto_TEMP + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productphoto_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala index fa6298867..56b5cbfec 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala @@ -87,4 +87,17 @@ class ProductphotoRepoMock(toRow: Function1[ProductphotoRowUnsaved, Productphoto map.put(unsaved.productphotoid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductphotoRow])(implicit c: Connection): List[ProductphotoRow] = { + unsaved.map { row => + map += (row.productphotoid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productphotoid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala index 669286788..cd172131d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala @@ -29,4 +29,7 @@ trait ProductproductphotoRepo { def update: UpdateBuilder[ProductproductphotoFields, ProductproductphotoRow] def update(row: ProductproductphotoRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductproductphotoRow)(implicit c: Connection): ProductproductphotoRow + def upsertBatch(unsaved: Iterable[ProductproductphotoRow])(implicit c: Connection): List[ProductproductphotoRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductproductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala index c790f676a..8812d246a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId import adventureworks.production.productphoto.ProductphotoId import adventureworks.public.Flag +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,43 @@ class ProductproductphotoRepoImpl extends ProductproductphotoRepo { .executeInsert(ProductproductphotoRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductproductphotoRow])(implicit c: Connection): List[ProductproductphotoRow] = { + def toNamedParameter(row: ProductproductphotoRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("productphotoid", ParameterValue(row.productphotoid, null, ProductphotoId.toStatement)), + NamedParameter("primary", ParameterValue(row.primary, null, Flag.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + values ({productid}::int4, {productphotoid}::int4, {primary}::bool, {modifieddate}::timestamp) + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "productphotoid", "primary", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductproductphotoRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductproductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productproductphoto_TEMP (like production.productproductphoto) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productproductphoto_TEMP("productid", "productphotoid", "primary", "modifieddate") from stdin""", batchSize, unsaved)(ProductproductphotoRow.text, c): @nowarn + SQL"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + select * from productproductphoto_TEMP + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productproductphoto_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala index 71f288b1e..8c274815d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala @@ -87,4 +87,17 @@ class ProductproductphotoRepoMock(toRow: Function1[ProductproductphotoRowUnsaved map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductproductphotoRow])(implicit c: Connection): List[ProductproductphotoRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductproductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala index 1dce4ece3..16c0c6814 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala @@ -29,4 +29,7 @@ trait ProductreviewRepo { def update: UpdateBuilder[ProductreviewFields, ProductreviewRow] def update(row: ProductreviewRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductreviewRow)(implicit c: Connection): ProductreviewRow + def upsertBatch(unsaved: Iterable[ProductreviewRow])(implicit c: Connection): List[ProductreviewRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductreviewRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala index da515e31d..5256b0e30 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -157,4 +159,57 @@ class ProductreviewRepoImpl extends ProductreviewRepo { .executeInsert(ProductreviewRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductreviewRow])(implicit c: Connection): List[ProductreviewRow] = { + def toNamedParameter(row: ProductreviewRow): List[NamedParameter] = List( + NamedParameter("productreviewid", ParameterValue(row.productreviewid, null, ProductreviewId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("reviewername", ParameterValue(row.reviewername, null, Name.toStatement)), + NamedParameter("reviewdate", ParameterValue(row.reviewdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("emailaddress", ParameterValue(row.emailaddress, null, ToStatement.stringToStatement)), + NamedParameter("rating", ParameterValue(row.rating, null, ToStatement.intToStatement)), + NamedParameter("comments", ParameterValue(row.comments, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + values ({productreviewid}::int4, {productid}::int4, {reviewername}::varchar, {reviewdate}::timestamp, {emailaddress}, {rating}::int4, {comments}, {modifieddate}::timestamp) + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + returning "productreviewid", "productid", "reviewername", "reviewdate"::text, "emailaddress", "rating", "comments", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductreviewRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductreviewRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productreview_TEMP (like production.productreview) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productreview_TEMP("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") from stdin""", batchSize, unsaved)(ProductreviewRow.text, c): @nowarn + SQL"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + select * from productreview_TEMP + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productreview_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala index 1bc884d95..f65ed62d3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala @@ -87,4 +87,17 @@ class ProductreviewRepoMock(toRow: Function1[ProductreviewRowUnsaved, Productrev map.put(unsaved.productreviewid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductreviewRow])(implicit c: Connection): List[ProductreviewRow] = { + unsaved.map { row => + map += (row.productreviewid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductreviewRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productreviewid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala index 14ea90426..41fcf6294 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala @@ -29,4 +29,7 @@ trait ProductsubcategoryRepo { def update: UpdateBuilder[ProductsubcategoryFields, ProductsubcategoryRow] def update(row: ProductsubcategoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductsubcategoryRow)(implicit c: Connection): ProductsubcategoryRow + def upsertBatch(unsaved: Iterable[ProductsubcategoryRow])(implicit c: Connection): List[ProductsubcategoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductsubcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala index cade4b72d..b2c54983d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.production.productcategory.ProductcategoryId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -144,4 +146,48 @@ class ProductsubcategoryRepoImpl extends ProductsubcategoryRepo { .executeInsert(ProductsubcategoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductsubcategoryRow])(implicit c: Connection): List[ProductsubcategoryRow] = { + def toNamedParameter(row: ProductsubcategoryRow): List[NamedParameter] = List( + NamedParameter("productsubcategoryid", ParameterValue(row.productsubcategoryid, null, ProductsubcategoryId.toStatement)), + NamedParameter("productcategoryid", ParameterValue(row.productcategoryid, null, ProductcategoryId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + values ({productsubcategoryid}::int4, {productcategoryid}::int4, {name}::varchar, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductsubcategoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductsubcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productsubcategory_TEMP (like production.productsubcategory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productsubcategory_TEMP("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductsubcategoryRow.text, c): @nowarn + SQL"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + select * from productsubcategory_TEMP + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productsubcategory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala index 2336212e1..62510fdab 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductsubcategoryRepoMock(toRow: Function1[ProductsubcategoryRowUnsaved, map.put(unsaved.productsubcategoryid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductsubcategoryRow])(implicit c: Connection): List[ProductsubcategoryRow] = { + unsaved.map { row => + map += (row.productsubcategoryid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductsubcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productsubcategoryid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala index 011a077cd..7ebfefd6c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala @@ -29,4 +29,7 @@ trait ScrapreasonRepo { def update: UpdateBuilder[ScrapreasonFields, ScrapreasonRow] def update(row: ScrapreasonRow)(implicit c: Connection): Boolean def upsert(unsaved: ScrapreasonRow)(implicit c: Connection): ScrapreasonRow + def upsertBatch(unsaved: Iterable[ScrapreasonRow])(implicit c: Connection): List[ScrapreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ScrapreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala index e19d32c76..41dfd1c30 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala @@ -10,6 +10,7 @@ package scrapreason import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -131,4 +133,42 @@ class ScrapreasonRepoImpl extends ScrapreasonRepo { .executeInsert(ScrapreasonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ScrapreasonRow])(implicit c: Connection): List[ScrapreasonRow] = { + def toNamedParameter(row: ScrapreasonRow): List[NamedParameter] = List( + NamedParameter("scrapreasonid", ParameterValue(row.scrapreasonid, null, ScrapreasonId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + values ({scrapreasonid}::int4, {name}::varchar, {modifieddate}::timestamp) + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "scrapreasonid", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ScrapreasonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ScrapreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table scrapreason_TEMP (like production.scrapreason) on commit drop".execute(): @nowarn + streamingInsert(s"""copy scrapreason_TEMP("scrapreasonid", "name", "modifieddate") from stdin""", batchSize, unsaved)(ScrapreasonRow.text, c): @nowarn + SQL"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + select * from scrapreason_TEMP + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table scrapreason_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala index 5d161f69c..107aa26c2 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala @@ -87,4 +87,17 @@ class ScrapreasonRepoMock(toRow: Function1[ScrapreasonRowUnsaved, ScrapreasonRow map.put(unsaved.scrapreasonid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ScrapreasonRow])(implicit c: Connection): List[ScrapreasonRow] = { + unsaved.map { row => + map += (row.scrapreasonid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ScrapreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.scrapreasonid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala index 0a61c5d64..0dbc93d54 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala @@ -29,4 +29,7 @@ trait TransactionhistoryRepo { def update: UpdateBuilder[TransactionhistoryFields, TransactionhistoryRow] def update(row: TransactionhistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: TransactionhistoryRow)(implicit c: Connection): TransactionhistoryRow + def upsertBatch(unsaved: Iterable[TransactionhistoryRow])(implicit c: Connection): List[TransactionhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[TransactionhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala index 48f473c4e..356202296 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala @@ -10,6 +10,7 @@ package transactionhistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -162,4 +164,60 @@ class TransactionhistoryRepoImpl extends TransactionhistoryRepo { .executeInsert(TransactionhistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[TransactionhistoryRow])(implicit c: Connection): List[TransactionhistoryRow] = { + def toNamedParameter(row: TransactionhistoryRow): List[NamedParameter] = List( + NamedParameter("transactionid", ParameterValue(row.transactionid, null, TransactionhistoryId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("referenceorderid", ParameterValue(row.referenceorderid, null, ToStatement.intToStatement)), + NamedParameter("referenceorderlineid", ParameterValue(row.referenceorderlineid, null, ToStatement.intToStatement)), + NamedParameter("transactiondate", ParameterValue(row.transactiondate, null, TypoLocalDateTime.toStatement)), + NamedParameter("transactiontype", ParameterValue(row.transactiontype, null, ToStatement.stringToStatement)), + NamedParameter("quantity", ParameterValue(row.quantity, null, ToStatement.intToStatement)), + NamedParameter("actualcost", ParameterValue(row.actualcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + values ({transactionid}::int4, {productid}::int4, {referenceorderid}::int4, {referenceorderlineid}::int4, {transactiondate}::timestamp, {transactiontype}::bpchar, {quantity}::int4, {actualcost}::numeric, {modifieddate}::timestamp) + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning 
"transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(TransactionhistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[TransactionhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table transactionhistory_TEMP (like production.transactionhistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy transactionhistory_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(TransactionhistoryRow.text, c): @nowarn + SQL"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistory_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala index d3a7048ab..6eab7339a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala @@ -87,4 +87,17 @@ class TransactionhistoryRepoMock(toRow: Function1[TransactionhistoryRowUnsaved, map.put(unsaved.transactionid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[TransactionhistoryRow])(implicit c: Connection): List[TransactionhistoryRow] = { + unsaved.map { row => + map += (row.transactionid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[TransactionhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.transactionid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala index 6e0b145c2..8d30c0a9b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala @@ -29,4 +29,7 @@ trait TransactionhistoryarchiveRepo { def update: UpdateBuilder[TransactionhistoryarchiveFields, TransactionhistoryarchiveRow] def update(row: TransactionhistoryarchiveRow)(implicit c: Connection): Boolean def upsert(unsaved: TransactionhistoryarchiveRow)(implicit c: Connection): TransactionhistoryarchiveRow + def upsertBatch(unsaved: Iterable[TransactionhistoryarchiveRow])(implicit c: Connection): List[TransactionhistoryarchiveRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[TransactionhistoryarchiveRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala index d3ebb6a4a..2f601f16c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala @@ -9,6 +9,7 @@ package transactionhistoryarchive import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -158,4 +160,60 @@ class TransactionhistoryarchiveRepoImpl extends TransactionhistoryarchiveRepo { .executeInsert(TransactionhistoryarchiveRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[TransactionhistoryarchiveRow])(implicit c: Connection): List[TransactionhistoryarchiveRow] = { + def toNamedParameter(row: TransactionhistoryarchiveRow): List[NamedParameter] = List( + NamedParameter("transactionid", ParameterValue(row.transactionid, null, TransactionhistoryarchiveId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ToStatement.intToStatement)), + NamedParameter("referenceorderid", ParameterValue(row.referenceorderid, null, ToStatement.intToStatement)), + NamedParameter("referenceorderlineid", ParameterValue(row.referenceorderlineid, null, ToStatement.intToStatement)), + NamedParameter("transactiondate", ParameterValue(row.transactiondate, null, 
TypoLocalDateTime.toStatement)), + NamedParameter("transactiontype", ParameterValue(row.transactiontype, null, ToStatement.stringToStatement)), + NamedParameter("quantity", ParameterValue(row.quantity, null, ToStatement.intToStatement)), + NamedParameter("actualcost", ParameterValue(row.actualcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + values ({transactionid}::int4, {productid}::int4, {referenceorderid}::int4, {referenceorderlineid}::int4, {transactiondate}::timestamp, {transactiontype}::bpchar, {quantity}::int4, {actualcost}::numeric, {modifieddate}::timestamp) + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(TransactionhistoryarchiveRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[TransactionhistoryarchiveRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table transactionhistoryarchive_TEMP (like production.transactionhistoryarchive) on commit drop".execute(): @nowarn + streamingInsert(s"""copy transactionhistoryarchive_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(TransactionhistoryarchiveRow.text, c): @nowarn + SQL"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistoryarchive_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistoryarchive_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala index 9c972c3b8..b4b9ad1d8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala @@ -87,4 +87,17 @@ class TransactionhistoryarchiveRepoMock(toRow: Function1[Transactionhistoryarchi map.put(unsaved.transactionid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[TransactionhistoryarchiveRow])(implicit c: Connection): List[TransactionhistoryarchiveRow] = { + unsaved.map { row => + map += (row.transactionid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[TransactionhistoryarchiveRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.transactionid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala index fbf624a03..ca017dc08 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala @@ -29,4 +29,7 @@ trait UnitmeasureRepo { def update: UpdateBuilder[UnitmeasureFields, UnitmeasureRow] def update(row: UnitmeasureRow)(implicit c: Connection): Boolean def upsert(unsaved: UnitmeasureRow)(implicit c: Connection): UnitmeasureRow + def upsertBatch(unsaved: Iterable[UnitmeasureRow])(implicit c: Connection): List[UnitmeasureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[UnitmeasureRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala index 3afc1c48c..bb09e5832 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala @@ -10,6 +10,7 @@ package unitmeasure import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -128,4 +130,42 @@ class UnitmeasureRepoImpl extends UnitmeasureRepo { .executeInsert(UnitmeasureRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[UnitmeasureRow])(implicit c: Connection): List[UnitmeasureRow] = { + def toNamedParameter(row: UnitmeasureRow): List[NamedParameter] = List( + NamedParameter("unitmeasurecode", ParameterValue(row.unitmeasurecode, null, UnitmeasureId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + values ({unitmeasurecode}::bpchar, {name}::varchar, {modifieddate}::timestamp) + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "unitmeasurecode", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(UnitmeasureRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[UnitmeasureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table unitmeasure_TEMP (like production.unitmeasure) on commit drop".execute(): @nowarn + streamingInsert(s"""copy unitmeasure_TEMP("unitmeasurecode", "name", "modifieddate") from stdin""", batchSize, unsaved)(UnitmeasureRow.text, c): @nowarn + SQL"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + select * from unitmeasure_TEMP + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table unitmeasure_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala index 524898961..95af9d14c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala @@ -87,4 +87,17 @@ class UnitmeasureRepoMock(toRow: Function1[UnitmeasureRowUnsaved, UnitmeasureRow map.put(unsaved.unitmeasurecode, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[UnitmeasureRow])(implicit c: Connection): List[UnitmeasureRow] = { + unsaved.map { row => + map += (row.unitmeasurecode -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[UnitmeasureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.unitmeasurecode -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala index 08cb693e5..c2f482859 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala @@ -29,4 +29,7 @@ trait WorkorderRepo { def update: UpdateBuilder[WorkorderFields, WorkorderRow] def update(row: WorkorderRow)(implicit c: Connection): Boolean def upsert(unsaved: WorkorderRow)(implicit c: Connection): WorkorderRow + def upsertBatch(unsaved: Iterable[WorkorderRow])(implicit c: Connection): List[WorkorderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[WorkorderRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala index 23741d0ea..ae22a06d0 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.product.ProductId import adventureworks.production.scrapreason.ScrapreasonId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -158,4 +160,60 @@ class WorkorderRepoImpl extends WorkorderRepo { .executeInsert(WorkorderRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[WorkorderRow])(implicit c: Connection): List[WorkorderRow] = { + def toNamedParameter(row: WorkorderRow): List[NamedParameter] = List( + NamedParameter("workorderid", ParameterValue(row.workorderid, null, WorkorderId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("orderqty", ParameterValue(row.orderqty, null, ToStatement.intToStatement)), + NamedParameter("scrappedqty", ParameterValue(row.scrappedqty, null, TypoShort.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("duedate", ParameterValue(row.duedate, null, TypoLocalDateTime.toStatement)), + NamedParameter("scrapreasonid", ParameterValue(row.scrapreasonid, null, ToStatement.optionToStatement(ScrapreasonId.toStatement, ScrapreasonId.parameterMetadata))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + values ({workorderid}::int4, {productid}::int4, {orderqty}::int4, {scrappedqty}::int2, {startdate}::timestamp, {enddate}::timestamp, {duedate}::timestamp, {scrapreasonid}::int2, {modifieddate}::timestamp) + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + returning "workorderid", "productid", "orderqty", "scrappedqty", "startdate"::text, "enddate"::text, "duedate"::text, "scrapreasonid", "modifieddate"::text + """, + toNamedParameter(head), + 
rest.map(toNamedParameter)* + ) + ).executeReturning(WorkorderRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[WorkorderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table workorder_TEMP (like production.workorder) on commit drop".execute(): @nowarn + streamingInsert(s"""copy workorder_TEMP("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") from stdin""", batchSize, unsaved)(WorkorderRow.text, c): @nowarn + SQL"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + select * from workorder_TEMP + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorder_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala index 7e787c8f2..f21b6e49a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala @@ -87,4 +87,17 @@ class WorkorderRepoMock(toRow: Function1[WorkorderRowUnsaved, WorkorderRow], map.put(unsaved.workorderid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[WorkorderRow])(implicit c: Connection): List[WorkorderRow] = { + unsaved.map { row => + map += (row.workorderid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[WorkorderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.workorderid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala index a7b98aa54..d87cb2cae 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala @@ -29,4 +29,7 @@ trait WorkorderroutingRepo { def update: UpdateBuilder[WorkorderroutingFields, WorkorderroutingRow] def update(row: WorkorderroutingRow)(implicit c: Connection): Boolean def upsert(unsaved: WorkorderroutingRow)(implicit c: Connection): WorkorderroutingRow + def upsertBatch(unsaved: Iterable[WorkorderroutingRow])(implicit c: Connection): List[WorkorderroutingRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[WorkorderroutingRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala index 7c3081d79..13dff9a30 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.location.LocationId import adventureworks.production.workorder.WorkorderId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -172,4 +174,65 @@ class WorkorderroutingRepoImpl extends WorkorderroutingRepo { .executeInsert(WorkorderroutingRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[WorkorderroutingRow])(implicit c: Connection): List[WorkorderroutingRow] = { + def toNamedParameter(row: WorkorderroutingRow): List[NamedParameter] = List( + NamedParameter("workorderid", ParameterValue(row.workorderid, null, WorkorderId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ToStatement.intToStatement)), + NamedParameter("operationsequence", ParameterValue(row.operationsequence, null, TypoShort.toStatement)), + NamedParameter("locationid", ParameterValue(row.locationid, null, LocationId.toStatement)), + NamedParameter("scheduledstartdate", ParameterValue(row.scheduledstartdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("scheduledenddate", ParameterValue(row.scheduledenddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("actualstartdate", ParameterValue(row.actualstartdate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("actualenddate", ParameterValue(row.actualenddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("actualresourcehrs", ParameterValue(row.actualresourcehrs, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("plannedcost", ParameterValue(row.plannedcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("actualcost", ParameterValue(row.actualcost, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", 
"actualcost", "modifieddate") + values ({workorderid}::int4, {productid}::int4, {operationsequence}::int2, {locationid}::int2, {scheduledstartdate}::timestamp, {scheduledenddate}::timestamp, {actualstartdate}::timestamp, {actualenddate}::timestamp, {actualresourcehrs}::numeric, {plannedcost}::numeric, {actualcost}::numeric, {modifieddate}::timestamp) + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate"::text, "scheduledenddate"::text, "actualstartdate"::text, "actualenddate"::text, "actualresourcehrs", "plannedcost", "actualcost", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(WorkorderroutingRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[WorkorderroutingRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table workorderrouting_TEMP (like production.workorderrouting) on commit drop".execute(): @nowarn + streamingInsert(s"""copy workorderrouting_TEMP("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(WorkorderroutingRow.text, c): @nowarn + SQL"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") + select * from workorderrouting_TEMP + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorderrouting_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala index d302a12ed..d17a8660d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala @@ -87,4 +87,17 @@ class WorkorderroutingRepoMock(toRow: Function1[WorkorderroutingRowUnsaved, Work map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[WorkorderroutingRow])(implicit c: Connection): 
List[WorkorderroutingRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[WorkorderroutingRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala index 609b5fb3e..caf14d62b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala @@ -26,4 +26,7 @@ trait FlaffRepo { def update: UpdateBuilder[FlaffFields, FlaffRow] def update(row: FlaffRow)(implicit c: Connection): Boolean def upsert(unsaved: FlaffRow)(implicit c: Connection): FlaffRow + def upsertBatch(unsaved: Iterable[FlaffRow])(implicit c: Connection): List[FlaffRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[FlaffRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala index 71e7cab75..281c56705 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala @@ -7,10 +7,13 @@ package adventureworks package public package flaff +import anorm.BatchSql +import anorm.NamedParameter import anorm.ParameterValue import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -103,4 +106,42 @@ class FlaffRepoImpl extends FlaffRepo { .executeInsert(FlaffRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[FlaffRow])(implicit c: Connection): List[FlaffRow] = { + def toNamedParameter(row: FlaffRow): List[NamedParameter] = List( + NamedParameter("code", ParameterValue(row.code, null, ShortText.toStatement)), + NamedParameter("another_code", ParameterValue(row.anotherCode, null, ToStatement.stringToStatement)), + NamedParameter("some_number", ParameterValue(row.someNumber, null, ToStatement.intToStatement)), + NamedParameter("specifier", ParameterValue(row.specifier, null, ShortText.toStatement)), + NamedParameter("parentspecifier", ParameterValue(row.parentspecifier, null, ToStatement.optionToStatement(ShortText.toStatement, ShortText.parameterMetadata))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + values ({code}::text, {another_code}, {some_number}::int4, {specifier}::text, {parentspecifier}::text) + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + returning "code", "another_code", "some_number", "specifier", "parentspecifier" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + 
) + ).executeReturning(FlaffRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FlaffRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table flaff_TEMP (like public.flaff) on commit drop".execute(): @nowarn + streamingInsert(s"""copy flaff_TEMP("code", "another_code", "some_number", "specifier", "parentspecifier") from stdin""", batchSize, unsaved)(FlaffRow.text, c): @nowarn + SQL"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + select * from flaff_TEMP + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + ; + drop table flaff_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala index 59eae40c9..58a5d388d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala @@ -75,4 +75,17 @@ class FlaffRepoMock(map: scala.collection.mutable.Map[FlaffId, FlaffRow] = scala map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[FlaffRow])(implicit c: Connection): List[FlaffRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FlaffRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala index f684ba288..7f7eca706 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala @@ -29,4 +29,7 @@ trait IdentityTestRepo { def update: UpdateBuilder[IdentityTestFields, IdentityTestRow] def update(row: IdentityTestRow)(implicit c: Connection): Boolean def upsert(unsaved: IdentityTestRow)(implicit c: Connection): IdentityTestRow + def upsertBatch(unsaved: Iterable[IdentityTestRow])(implicit c: Connection): List[IdentityTestRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */
+  def upsertStreaming(unsaved: Iterator[IdentityTestRow], batchSize: Int = 10000)(implicit c: Connection): Int
 }
diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala
index 91b4026e5..9658a7aa2 100644
--- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala
+++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala
@@ -8,6 +8,7 @@ package public
 package identity_test
 
 import adventureworks.customtypes.Defaulted
+import anorm.BatchSql
 import anorm.NamedParameter
 import anorm.ParameterValue
 import anorm.RowParser
@@ -16,6 +17,7 @@ import anorm.SimpleSql
 import anorm.SqlStringInterpolation
 import anorm.ToStatement
 import java.sql.Connection
+import scala.annotation.nowarn
 import typo.dsl.DeleteBuilder
 import typo.dsl.SelectBuilder
 import typo.dsl.SelectBuilderSql
@@ -126,4 +128,42 @@ class IdentityTestRepoImpl extends IdentityTestRepo {
       .executeInsert(IdentityTestRow.rowParser(1).single)
 
   }
+  override def upsertBatch(unsaved: Iterable[IdentityTestRow])(implicit c: Connection): List[IdentityTestRow] = {
+    def toNamedParameter(row: IdentityTestRow): List[NamedParameter] = List(
+      NamedParameter("always_generated", ParameterValue(row.alwaysGenerated, null, ToStatement.intToStatement)),
+      NamedParameter("default_generated", ParameterValue(row.defaultGenerated, null, ToStatement.intToStatement)),
+      NamedParameter("name", ParameterValue(row.name, null, IdentityTestId.toStatement))
+    )
+    unsaved.toList match {
+      case Nil => Nil
+      case head :: rest =>
+        new anorm.adventureworks.ExecuteReturningSyntax.Ops(
+          BatchSql(
+            s"""insert into public."identity-test"("always_generated", "default_generated", "name")
+                values ({always_generated}::int4, {default_generated}::int4, {name})
+                on conflict ("name")
+                do update set
+                  "always_generated" = EXCLUDED."always_generated",
+                  "default_generated" = EXCLUDED."default_generated"
+                returning "always_generated", "default_generated", "name"
+             """,
+            toNamedParameter(head),
+            rest.map(toNamedParameter)*
+          )
+        ).executeReturning(IdentityTestRow.rowParser(1).*)
+    }
+  }
+  /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */
+  override def upsertStreaming(unsaved: Iterator[IdentityTestRow], batchSize: Int = 10000)(implicit c: Connection): Int = {
+    SQL"""create temporary table "identity-test_TEMP" (like public."identity-test") on commit drop""".execute(): @nowarn
+    streamingInsert(s"""copy "identity-test_TEMP"("always_generated", "default_generated", "name") from stdin""", batchSize, unsaved)(IdentityTestRow.text, c): @nowarn
+    SQL"""insert into public."identity-test"("always_generated", "default_generated", "name")
+       select * from "identity-test_TEMP"
+       on conflict ("name")
+       do update set
+         "always_generated" = EXCLUDED."always_generated",
+         "default_generated" = EXCLUDED."default_generated"
+       ;
+       drop table "identity-test_TEMP";""".executeUpdate()
+  }
 }
diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala
index b52d7335a..f360aff44 100644
--- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala
+++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala
@@ -87,4 +87,17 @@ class IdentityTestRepoMock(toRow: Function1[IdentityTestRowUnsaved, IdentityTest
     map.put(unsaved.name, unsaved): @nowarn
     unsaved
   }
+  override def upsertBatch(unsaved: Iterable[IdentityTestRow])(implicit c: Connection): List[IdentityTestRow] = {
+    unsaved.map { row =>
+      map += (row.name -> row)
+      row
+    }.toList
+  }
+  /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */
+  override def upsertStreaming(unsaved: Iterator[IdentityTestRow], batchSize: Int = 10000)(implicit c: Connection): Int = {
+    unsaved.foreach { row =>
+      map += (row.name -> row)
+    }
+    unsaved.size
+  }
 }
diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala
index b91d7dbd7..9f7963d1a 100644
--- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala
+++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala
@@ -31,4 +31,7 @@ trait UsersRepo {
   def update: UpdateBuilder[UsersFields, UsersRow]
   def update(row: UsersRow)(implicit c: Connection): Boolean
   def upsert(unsaved: UsersRow)(implicit c: Connection): UsersRow
+  def upsertBatch(unsaved: Iterable[UsersRow])(implicit c: Connection): List[UsersRow]
+  /* NOTE: this functionality is not safe if you use auto-commit mode!
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[UsersRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala index 1734741fd..2339840ea 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala @@ -10,6 +10,7 @@ package users import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoInstant import adventureworks.customtypes.TypoUnknownCitext +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -153,4 +155,54 @@ class UsersRepoImpl extends UsersRepo { .executeInsert(UsersRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[UsersRow])(implicit c: Connection): List[UsersRow] = { + def toNamedParameter(row: UsersRow): List[NamedParameter] = List( + NamedParameter("user_id", ParameterValue(row.userId, null, UsersId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)), + NamedParameter("last_name", ParameterValue(row.lastName, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("email", ParameterValue(row.email, null, TypoUnknownCitext.toStatement)), + NamedParameter("password", ParameterValue(row.password, null, ToStatement.stringToStatement)), + NamedParameter("created_at", ParameterValue(row.createdAt, null, TypoInstant.toStatement)), + NamedParameter("verified_on", ParameterValue(row.verifiedOn, null, ToStatement.optionToStatement(TypoInstant.toStatement, TypoInstant.parameterMetadata))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + values ({user_id}::uuid, {name}, {last_name}, {email}::citext, {password}, {created_at}::timestamptz, {verified_on}::timestamptz) + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + returning "user_id", "name", "last_name", "email"::text, "password", "created_at"::text, "verified_on"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(UsersRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[UsersRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table users_TEMP (like public.users) on commit drop".execute(): @nowarn + streamingInsert(s"""copy users_TEMP("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") from stdin""", batchSize, unsaved)(UsersRow.text, c): @nowarn + SQL"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + select * from users_TEMP + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + ; + drop table users_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala index 5c36bc2de..24abf7a4a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala @@ -91,4 +91,17 @@ class UsersRepoMock(toRow: Function1[UsersRowUnsaved, UsersRow], map.put(unsaved.userId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[UsersRow])(implicit c: Connection): List[UsersRow] = { + unsaved.map { row => + map += (row.userId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[UsersRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.userId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala index 6be936523..f485e415e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala @@ -29,4 +29,7 @@ trait ProductvendorRepo { def update: UpdateBuilder[ProductvendorFields, ProductvendorRow] def update(row: ProductvendorRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductvendorRow)(implicit c: Connection): ProductvendorRow + def upsertBatch(unsaved: Iterable[ProductvendorRow])(implicit c: Connection): List[ProductvendorRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductvendorRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala index 0be92ff9e..c5588cb26 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.production.product.ProductId import adventureworks.production.unitmeasure.UnitmeasureId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -168,4 +170,64 @@ class ProductvendorRepoImpl extends ProductvendorRepo { .executeInsert(ProductvendorRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductvendorRow])(implicit c: Connection): List[ProductvendorRow] = { + def toNamedParameter(row: ProductvendorRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("averageleadtime", ParameterValue(row.averageleadtime, null, ToStatement.intToStatement)), + NamedParameter("standardprice", ParameterValue(row.standardprice, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("lastreceiptcost", ParameterValue(row.lastreceiptcost, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("lastreceiptdate", ParameterValue(row.lastreceiptdate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("minorderqty", ParameterValue(row.minorderqty, null, ToStatement.intToStatement)), + NamedParameter("maxorderqty", ParameterValue(row.maxorderqty, null, ToStatement.intToStatement)), + NamedParameter("onorderqty", ParameterValue(row.onorderqty, null, ToStatement.optionToStatement(ToStatement.intToStatement, ParameterMetaData.IntParameterMetaData))), + NamedParameter("unitmeasurecode", ParameterValue(row.unitmeasurecode, null, UnitmeasureId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + values ({productid}::int4, {businessentityid}::int4, {averageleadtime}::int4, {standardprice}::numeric, {lastreceiptcost}::numeric, {lastreceiptdate}::timestamp, {minorderqty}::int4, {maxorderqty}::int4, {onorderqty}::int4, 
{unitmeasurecode}::bpchar, {modifieddate}::timestamp) + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate"::text, "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductvendorRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductvendorRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productvendor_TEMP (like purchasing.productvendor) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productvendor_TEMP("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") from stdin""", batchSize, unsaved)(ProductvendorRow.text, c): @nowarn + SQL"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + select * from productvendor_TEMP + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productvendor_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala index c478d4ffd..5d51e3fe3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala @@ -87,4 +87,17 @@ class ProductvendorRepoMock(toRow: Function1[ProductvendorRowUnsaved, Productven map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductvendorRow])(implicit c: Connection): List[ProductvendorRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductvendorRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala index 6eff633cc..35c18c78c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala @@ -29,4 +29,7 @@ trait PurchaseorderheaderRepo { def update: UpdateBuilder[PurchaseorderheaderFields, PurchaseorderheaderRow] def update(row: PurchaseorderheaderRow)(implicit c: Connection): Boolean def upsert(unsaved: PurchaseorderheaderRow)(implicit c: Connection): PurchaseorderheaderRow + def upsertBatch(unsaved: Iterable[PurchaseorderheaderRow])(implicit c: Connection): List[PurchaseorderheaderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PurchaseorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala index 9b71be81c..53c084b13 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.person.businessentity.BusinessentityId import adventureworks.purchasing.shipmethod.ShipmethodId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -188,4 +190,69 @@ class PurchaseorderheaderRepoImpl extends PurchaseorderheaderRepo { .executeInsert(PurchaseorderheaderRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PurchaseorderheaderRow])(implicit c: Connection): List[PurchaseorderheaderRow] = { + def toNamedParameter(row: PurchaseorderheaderRow): List[NamedParameter] = List( + NamedParameter("purchaseorderid", ParameterValue(row.purchaseorderid, null, PurchaseorderheaderId.toStatement)), + NamedParameter("revisionnumber", ParameterValue(row.revisionnumber, null, TypoShort.toStatement)), + NamedParameter("status", ParameterValue(row.status, null, TypoShort.toStatement)), + NamedParameter("employeeid", ParameterValue(row.employeeid, null, BusinessentityId.toStatement)), + NamedParameter("vendorid", ParameterValue(row.vendorid, null, BusinessentityId.toStatement)), + NamedParameter("shipmethodid", ParameterValue(row.shipmethodid, null, ShipmethodId.toStatement)), + NamedParameter("orderdate", 
ParameterValue(row.orderdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("shipdate", ParameterValue(row.shipdate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("subtotal", ParameterValue(row.subtotal, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("taxamt", ParameterValue(row.taxamt, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("freight", ParameterValue(row.freight, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + values ({purchaseorderid}::int4, {revisionnumber}::int2, {status}::int2, {employeeid}::int4, {vendorid}::int4, {shipmethodid}::int4, {orderdate}::timestamp, {shipdate}::timestamp, {subtotal}::numeric, {taxamt}::numeric, {freight}::numeric, {modifieddate}::timestamp) + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + returning "purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate"::text, "shipdate"::text, "subtotal", "taxamt", "freight", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PurchaseorderheaderRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PurchaseorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table purchaseorderheader_TEMP (like purchasing.purchaseorderheader) on commit drop".execute(): @nowarn + streamingInsert(s"""copy purchaseorderheader_TEMP("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") from stdin""", batchSize, unsaved)(PurchaseorderheaderRow.text, c): @nowarn + SQL"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + select * from purchaseorderheader_TEMP + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table purchaseorderheader_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala index b87ed685b..f94480732 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala @@ -87,4 +87,17 @@ class PurchaseorderheaderRepoMock(toRow: Function1[PurchaseorderheaderRowUnsaved map.put(unsaved.purchaseorderid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PurchaseorderheaderRow])(implicit c: Connection): List[PurchaseorderheaderRow] = { + unsaved.map { row => + map += (row.purchaseorderid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PurchaseorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.purchaseorderid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala index 2bcc28e36..a0a41b3e8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala @@ -29,4 +29,7 @@ trait ShipmethodRepo { def update: UpdateBuilder[ShipmethodFields, ShipmethodRow] def update(row: ShipmethodRow)(implicit c: Connection): Boolean def upsert(unsaved: ShipmethodRow)(implicit c: Connection): ShipmethodRow + def upsertBatch(unsaved: Iterable[ShipmethodRow])(implicit c: Connection): List[ShipmethodRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ShipmethodRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala index 6b41ceec7..baf2f33b8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -154,4 +156,51 @@ class ShipmethodRepoImpl extends ShipmethodRepo { .executeInsert(ShipmethodRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ShipmethodRow])(implicit c: Connection): List[ShipmethodRow] = { + def toNamedParameter(row: ShipmethodRow): List[NamedParameter] = List( + NamedParameter("shipmethodid", ParameterValue(row.shipmethodid, null, ShipmethodId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("shipbase", ParameterValue(row.shipbase, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("shiprate", ParameterValue(row.shiprate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + values ({shipmethodid}::int4, {name}::varchar, {shipbase}::numeric, {shiprate}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ShipmethodRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShipmethodRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table shipmethod_TEMP (like purchasing.shipmethod) on commit drop".execute(): @nowarn + streamingInsert(s"""copy shipmethod_TEMP("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ShipmethodRow.text, c): @nowarn + SQL"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + select * from shipmethod_TEMP + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shipmethod_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala index e20971743..0786b7a24 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala @@ -87,4 +87,17 @@ class ShipmethodRepoMock(toRow: Function1[ShipmethodRowUnsaved, ShipmethodRow], map.put(unsaved.shipmethodid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ShipmethodRow])(implicit c: Connection): List[ShipmethodRow] = { + unsaved.map { row => + map += (row.shipmethodid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShipmethodRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.shipmethodid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala index 1c6e1739c..539bc68ba 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala @@ -30,4 +30,7 @@ trait VendorRepo { def update: UpdateBuilder[VendorFields, VendorRow] def update(row: VendorRow)(implicit c: Connection): Boolean def upsert(unsaved: VendorRow)(implicit c: Connection): VendorRow + def upsertBatch(unsaved: Iterable[VendorRow])(implicit c: Connection): List[VendorRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[VendorRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala index 7808f0197..9e5a53d5c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.AccountNumber import adventureworks.public.Flag import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -23,6 +24,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -160,4 +162,57 @@ class VendorRepoImpl extends VendorRepo { .executeInsert(VendorRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[VendorRow])(implicit c: Connection): List[VendorRow] = { + def toNamedParameter(row: VendorRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("accountnumber", ParameterValue(row.accountnumber, null, AccountNumber.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("creditrating", ParameterValue(row.creditrating, null, TypoShort.toStatement)), + NamedParameter("preferredvendorstatus", ParameterValue(row.preferredvendorstatus, null, Flag.toStatement)), + NamedParameter("activeflag", ParameterValue(row.activeflag, null, Flag.toStatement)), + NamedParameter("purchasingwebserviceurl", ParameterValue(row.purchasingwebserviceurl, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + values ({businessentityid}::int4, {accountnumber}::varchar, {name}::varchar, {creditrating}::int2, {preferredvendorstatus}::bool, {activeflag}::bool, {purchasingwebserviceurl}, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(VendorRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use 
auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[VendorRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table vendor_TEMP (like purchasing.vendor) on commit drop".execute(): @nowarn + streamingInsert(s"""copy vendor_TEMP("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") from stdin""", batchSize, unsaved)(VendorRow.text, c): @nowarn + SQL"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + select * from vendor_TEMP + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table vendor_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala index 75443f4d8..efc42e397 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala @@ -88,4 +88,17 @@ class VendorRepoMock(toRow: Function1[VendorRowUnsaved, VendorRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[VendorRow])(implicit c: Connection): List[VendorRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[VendorRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala index b688c3055..3c7005e55 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala @@ -29,4 +29,7 @@ trait CountryregioncurrencyRepo { def update: UpdateBuilder[CountryregioncurrencyFields, CountryregioncurrencyRow] def update(row: CountryregioncurrencyRow)(implicit c: Connection): Boolean def upsert(unsaved: CountryregioncurrencyRow)(implicit c: Connection): CountryregioncurrencyRow + def upsertBatch(unsaved: Iterable[CountryregioncurrencyRow])(implicit c: Connection): List[CountryregioncurrencyRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CountryregioncurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala index 5d3284f75..87e8c0675 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.countryregion.CountryregionId import adventureworks.sales.currency.CurrencyId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -133,4 +135,40 @@ class CountryregioncurrencyRepoImpl extends CountryregioncurrencyRepo { .executeInsert(CountryregioncurrencyRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CountryregioncurrencyRow])(implicit c: Connection): List[CountryregioncurrencyRow] = { + def toNamedParameter(row: CountryregioncurrencyRow): List[NamedParameter] = List( + NamedParameter("countryregioncode", ParameterValue(row.countryregioncode, null, CountryregionId.toStatement)), + NamedParameter("currencycode", ParameterValue(row.currencycode, null, CurrencyId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + values ({countryregioncode}, {currencycode}::bpchar, {modifieddate}::timestamp) + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "countryregioncode", "currencycode", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CountryregioncurrencyRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CountryregioncurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table countryregioncurrency_TEMP (like sales.countryregioncurrency) on commit drop".execute(): @nowarn + streamingInsert(s"""copy countryregioncurrency_TEMP("countryregioncode", "currencycode", "modifieddate") from stdin""", batchSize, unsaved)(CountryregioncurrencyRow.text, c): @nowarn + SQL"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + select * from countryregioncurrency_TEMP + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregioncurrency_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala index 0c568d54e..534ddb8c1 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala @@ -87,4 +87,17 @@ class CountryregioncurrencyRepoMock(toRow: Function1[CountryregioncurrencyRowUns map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CountryregioncurrencyRow])(implicit c: Connection): List[CountryregioncurrencyRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CountryregioncurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala index 51ba3e86f..926492190 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala @@ -31,4 +31,7 @@ trait CreditcardRepo { def update: UpdateBuilder[CreditcardFields, CreditcardRow] def update(row: CreditcardRow)(implicit c: Connection): Boolean def upsert(unsaved: CreditcardRow)(implicit c: Connection): CreditcardRow + def upsertBatch(unsaved: Iterable[CreditcardRow])(implicit c: Connection): List[CreditcardRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala index 123da8791..279a95d23 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.userdefined.CustomCreditcardId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -145,4 +147,51 @@ class CreditcardRepoImpl extends CreditcardRepo { .executeInsert(CreditcardRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CreditcardRow])(implicit c: Connection): List[CreditcardRow] = { + def toNamedParameter(row: CreditcardRow): List[NamedParameter] = List( + NamedParameter("creditcardid", ParameterValue(row.creditcardid, null, /* user-picked */ CustomCreditcardId.toStatement)), + NamedParameter("cardtype", ParameterValue(row.cardtype, null, ToStatement.stringToStatement)), + NamedParameter("cardnumber", ParameterValue(row.cardnumber, null, ToStatement.stringToStatement)), + NamedParameter("expmonth", ParameterValue(row.expmonth, null, TypoShort.toStatement)), + NamedParameter("expyear", ParameterValue(row.expyear, null, TypoShort.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + values ({creditcardid}::int4, {cardtype}, {cardnumber}, {expmonth}::int2, {expyear}::int2, {modifieddate}::timestamp) + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + returning "creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CreditcardRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table creditcard_TEMP (like sales.creditcard) on commit drop".execute(): @nowarn + streamingInsert(s"""copy creditcard_TEMP("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") from stdin""", batchSize, unsaved)(CreditcardRow.text, c): @nowarn + SQL"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + select * from creditcard_TEMP + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table creditcard_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala index 5ecef2b20..0cfc1066d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala @@ -89,4 +89,17 @@ class CreditcardRepoMock(toRow: Function1[CreditcardRowUnsaved, CreditcardRow], map.put(unsaved.creditcardid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CreditcardRow])(implicit c: Connection): List[CreditcardRow] = { + unsaved.map { row => + map += (row.creditcardid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.creditcardid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala index 5fe703c79..5b394ac6b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala @@ -29,4 +29,7 @@ trait CurrencyRepo { def update: UpdateBuilder[CurrencyFields, CurrencyRow] def update(row: CurrencyRow)(implicit c: Connection): Boolean def upsert(unsaved: CurrencyRow)(implicit c: Connection): CurrencyRow + def upsertBatch(unsaved: Iterable[CurrencyRow])(implicit c: Connection): List[CurrencyRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala index 7ba724bee..84193857e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala @@ -10,6 +10,7 @@ package currency import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -128,4 +130,42 @@ class CurrencyRepoImpl extends CurrencyRepo { .executeInsert(CurrencyRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CurrencyRow])(implicit c: Connection): List[CurrencyRow] = { + def toNamedParameter(row: CurrencyRow): List[NamedParameter] = List( + NamedParameter("currencycode", ParameterValue(row.currencycode, null, CurrencyId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.currency("currencycode", "name", "modifieddate") + values ({currencycode}::bpchar, {name}::varchar, {modifieddate}::timestamp) + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "currencycode", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CurrencyRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
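   (specifically: the method below creates currency_TEMP with ON COMMIT DROP, bulk-loads it via COPY ... FROM STDIN, and then runs the INSERT ... ON CONFLICT followed by DROP TABLE. Under auto-commit each statement runs in its own transaction, so the ON COMMIT DROP table vanishes right after the CREATE and the COPY fails; that is the practical meaning of the note that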
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table currency_TEMP (like sales.currency) on commit drop".execute(): @nowarn + streamingInsert(s"""copy currency_TEMP("currencycode", "name", "modifieddate") from stdin""", batchSize, unsaved)(CurrencyRow.text, c): @nowarn + SQL"""insert into sales.currency("currencycode", "name", "modifieddate") + select * from currency_TEMP + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currency_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala index 5bf8164da..f486159d6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala @@ -87,4 +87,17 @@ class CurrencyRepoMock(toRow: Function1[CurrencyRowUnsaved, CurrencyRow], map.put(unsaved.currencycode, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CurrencyRow])(implicit c: Connection): List[CurrencyRow] = { + unsaved.map { row => + map += (row.currencycode -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.currencycode -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala index 51c409f6f..6c5512626 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala @@ -29,4 +29,7 @@ trait CurrencyrateRepo { def update: UpdateBuilder[CurrencyrateFields, CurrencyrateRow] def update(row: CurrencyrateRow)(implicit c: Connection): Boolean def upsert(unsaved: CurrencyrateRow)(implicit c: Connection): CurrencyrateRow + def upsertBatch(unsaved: Iterable[CurrencyrateRow])(implicit c: Connection): List[CurrencyrateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CurrencyrateRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala index 71aab263f..4fd34febd 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala @@ -10,6 +10,7 @@ package currencyrate import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.sales.currency.CurrencyId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -148,4 +150,54 @@ class CurrencyrateRepoImpl extends CurrencyrateRepo { .executeInsert(CurrencyrateRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CurrencyrateRow])(implicit c: Connection): List[CurrencyrateRow] = { + def toNamedParameter(row: CurrencyrateRow): List[NamedParameter] = List( + NamedParameter("currencyrateid", ParameterValue(row.currencyrateid, null, CurrencyrateId.toStatement)), + NamedParameter("currencyratedate", ParameterValue(row.currencyratedate, null, TypoLocalDateTime.toStatement)), + NamedParameter("fromcurrencycode", ParameterValue(row.fromcurrencycode, null, CurrencyId.toStatement)), + NamedParameter("tocurrencycode", ParameterValue(row.tocurrencycode, null, CurrencyId.toStatement)), + NamedParameter("averagerate", ParameterValue(row.averagerate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("endofdayrate", ParameterValue(row.endofdayrate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + values ({currencyrateid}::int4, {currencyratedate}::timestamp, {fromcurrencycode}::bpchar, {tocurrencycode}::bpchar, {averagerate}::numeric, {endofdayrate}::numeric, {modifieddate}::timestamp) + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + returning "currencyrateid", "currencyratedate"::text, "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CurrencyrateRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
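   (for context: streamingInsert feeds the iterator straight into the COPY currencyrate_TEMP ... FROM STDIN command shown below, presumably in chunks of batchSize rows encoded with CurrencyrateRow.text, so no intermediate collection is built. The auto-commit caveat still applies because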
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CurrencyrateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table currencyrate_TEMP (like sales.currencyrate) on commit drop".execute(): @nowarn + streamingInsert(s"""copy currencyrate_TEMP("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") from stdin""", batchSize, unsaved)(CurrencyrateRow.text, c): @nowarn + SQL"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + select * from currencyrate_TEMP + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currencyrate_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala index 4f17e13bd..19c9ef7ba 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala @@ -87,4 +87,17 @@ class CurrencyrateRepoMock(toRow: Function1[CurrencyrateRowUnsaved, Currencyrate map.put(unsaved.currencyrateid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CurrencyrateRow])(implicit c: Connection): List[CurrencyrateRow] = { + unsaved.map { row => + map += (row.currencyrateid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CurrencyrateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.currencyrateid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala index 1c04b7e16..1b7a03b0e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala @@ -29,4 +29,7 @@ trait CustomerRepo { def update: UpdateBuilder[CustomerFields, CustomerRow] def update(row: CustomerRow)(implicit c: Connection): Boolean def upsert(unsaved: CustomerRow)(implicit c: Connection): CustomerRow + def upsertBatch(unsaved: Iterable[CustomerRow])(implicit c: Connection): List[CustomerRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CustomerRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala index 06e88c2a2..f0a524144 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -149,4 +151,51 @@ class CustomerRepoImpl extends CustomerRepo { .executeInsert(CustomerRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CustomerRow])(implicit c: Connection): List[CustomerRow] = { + def toNamedParameter(row: CustomerRow): List[NamedParameter] = List( + NamedParameter("customerid", ParameterValue(row.customerid, null, CustomerId.toStatement)), + NamedParameter("personid", ParameterValue(row.personid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("storeid", ParameterValue(row.storeid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, ToStatement.optionToStatement(SalesterritoryId.toStatement, SalesterritoryId.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + values ({customerid}::int4, {personid}::int4, {storeid}::int4, {territoryid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CustomerRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CustomerRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table customer_TEMP (like sales.customer) on commit drop".execute(): @nowarn + streamingInsert(s"""copy customer_TEMP("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(CustomerRow.text, c): @nowarn + SQL"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + select * from customer_TEMP + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table customer_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala index 909601c34..e50c0773e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala @@ -87,4 +87,17 @@ class CustomerRepoMock(toRow: Function1[CustomerRowUnsaved, CustomerRow], map.put(unsaved.customerid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CustomerRow])(implicit c: Connection): List[CustomerRow] = { + unsaved.map { row => + map += (row.customerid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CustomerRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.customerid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala index 2748f91ad..0aeb4a01d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala @@ -31,4 +31,7 @@ trait PersoncreditcardRepo { def update: UpdateBuilder[PersoncreditcardFields, PersoncreditcardRow] def update(row: PersoncreditcardRow)(implicit c: Connection): Boolean def upsert(unsaved: PersoncreditcardRow)(implicit c: Connection): PersoncreditcardRow + def upsertBatch(unsaved: Iterable[PersoncreditcardRow])(implicit c: Connection): List[PersoncreditcardRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersoncreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala index 4e6597cb3..80244eedb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.userdefined.CustomCreditcardId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -134,4 +136,40 @@ class PersoncreditcardRepoImpl extends PersoncreditcardRepo { .executeInsert(PersoncreditcardRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersoncreditcardRow])(implicit c: Connection): List[PersoncreditcardRow] = { + def toNamedParameter(row: PersoncreditcardRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("creditcardid", ParameterValue(row.creditcardid, null, /* user-picked */ CustomCreditcardId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + values ({businessentityid}::int4, {creditcardid}::int4, {modifieddate}::timestamp) + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "creditcardid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersoncreditcardRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersoncreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table personcreditcard_TEMP (like sales.personcreditcard) on commit drop".execute(): @nowarn + streamingInsert(s"""copy personcreditcard_TEMP("businessentityid", "creditcardid", "modifieddate") from stdin""", batchSize, unsaved)(PersoncreditcardRow.text, c): @nowarn + SQL"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + select * from personcreditcard_TEMP + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personcreditcard_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala index 9f2dba0e4..ae1bb5b94 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala @@ -89,4 +89,17 @@ class PersoncreditcardRepoMock(toRow: Function1[PersoncreditcardRowUnsaved, Pers map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersoncreditcardRow])(implicit c: Connection): List[PersoncreditcardRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersoncreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala index a6d48fa80..0b1e009ea 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala @@ -29,4 +29,7 @@ trait SalesorderdetailRepo { def update: UpdateBuilder[SalesorderdetailFields, SalesorderdetailRow] def update(row: SalesorderdetailRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesorderdetailRow)(implicit c: Connection): SalesorderdetailRow + def upsertBatch(unsaved: Iterable[SalesorderdetailRow])(implicit c: Connection): List[SalesorderdetailRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesorderdetailRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala index 6f2d7ee79..200f66212 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoUUID import adventureworks.production.product.ProductId import adventureworks.sales.salesorderheader.SalesorderheaderId import adventureworks.sales.specialoffer.SpecialofferId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -23,6 +24,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -175,4 +177,61 @@ class SalesorderdetailRepoImpl extends SalesorderdetailRepo { .executeInsert(SalesorderdetailRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesorderdetailRow])(implicit c: Connection): List[SalesorderdetailRow] = { + def toNamedParameter(row: SalesorderdetailRow): List[NamedParameter] = List( + NamedParameter("salesorderid", ParameterValue(row.salesorderid, null, SalesorderheaderId.toStatement)), + NamedParameter("salesorderdetailid", ParameterValue(row.salesorderdetailid, null, ToStatement.intToStatement)), + NamedParameter("carriertrackingnumber", ParameterValue(row.carriertrackingnumber, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("orderqty", ParameterValue(row.orderqty, null, TypoShort.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("specialofferid", ParameterValue(row.specialofferid, null, SpecialofferId.toStatement)), + NamedParameter("unitprice", ParameterValue(row.unitprice, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("unitpricediscount", ParameterValue(row.unitpricediscount, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + values ({salesorderid}::int4, {salesorderdetailid}::int4, {carriertrackingnumber}, {orderqty}::int2, {productid}::int4, {specialofferid}::int4, {unitprice}::numeric, {unitpricediscount}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = 
EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesorderdetailRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderdetailRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesorderdetail_TEMP (like sales.salesorderdetail) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesorderdetail_TEMP("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderdetailRow.text, c): @nowarn + SQL"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + select * from salesorderdetail_TEMP + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderdetail_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala index b2df97004..53b9aada2 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala @@ -87,4 +87,17 @@ class SalesorderdetailRepoMock(toRow: Function1[SalesorderdetailRowUnsaved, Sale map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesorderdetailRow])(implicit c: Connection): List[SalesorderdetailRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
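   (in this in-memory mock the warning is moot, since no SQL is executed at all; note also that the method below first exhausts the iterator with foreach and then calls unsaved.size on it, so the returned Int should not be relied on here. In the real repository the warning matters because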
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderdetailRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala index f9ee388f1..c5a3c7c10 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala @@ -29,4 +29,7 @@ trait SalesorderheaderRepo { def update: UpdateBuilder[SalesorderheaderFields, SalesorderheaderRow] def update(row: SalesorderheaderRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesorderheaderRow)(implicit c: Connection): SalesorderheaderRow + def upsertBatch(unsaved: Iterable[SalesorderheaderRow])(implicit c: Connection): List[SalesorderheaderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala index 7d63f09f7..0c3db5ea1 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala @@ -21,6 +21,7 @@ import adventureworks.sales.currencyrate.CurrencyrateId import adventureworks.sales.customer.CustomerId import adventureworks.sales.salesterritory.SalesterritoryId import adventureworks.userdefined.CustomCreditcardId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -30,6 +31,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -256,4 +258,108 @@ class SalesorderheaderRepoImpl extends SalesorderheaderRepo { .executeInsert(SalesorderheaderRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesorderheaderRow])(implicit c: Connection): List[SalesorderheaderRow] = { + def toNamedParameter(row: SalesorderheaderRow): List[NamedParameter] = List( + NamedParameter("salesorderid", ParameterValue(row.salesorderid, null, SalesorderheaderId.toStatement)), + NamedParameter("revisionnumber", ParameterValue(row.revisionnumber, null, TypoShort.toStatement)), + NamedParameter("orderdate", ParameterValue(row.orderdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("duedate", ParameterValue(row.duedate, null, TypoLocalDateTime.toStatement)), + NamedParameter("shipdate", ParameterValue(row.shipdate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("status", ParameterValue(row.status, null, TypoShort.toStatement)), + NamedParameter("onlineorderflag", ParameterValue(row.onlineorderflag, null, Flag.toStatement)), + 
NamedParameter("purchaseordernumber", ParameterValue(row.purchaseordernumber, null, ToStatement.optionToStatement(OrderNumber.toStatement, OrderNumber.parameterMetadata))), + NamedParameter("accountnumber", ParameterValue(row.accountnumber, null, ToStatement.optionToStatement(AccountNumber.toStatement, AccountNumber.parameterMetadata))), + NamedParameter("customerid", ParameterValue(row.customerid, null, CustomerId.toStatement)), + NamedParameter("salespersonid", ParameterValue(row.salespersonid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, ToStatement.optionToStatement(SalesterritoryId.toStatement, SalesterritoryId.parameterMetadata))), + NamedParameter("billtoaddressid", ParameterValue(row.billtoaddressid, null, AddressId.toStatement)), + NamedParameter("shiptoaddressid", ParameterValue(row.shiptoaddressid, null, AddressId.toStatement)), + NamedParameter("shipmethodid", ParameterValue(row.shipmethodid, null, ShipmethodId.toStatement)), + NamedParameter("creditcardid", ParameterValue(row.creditcardid, null, ToStatement.optionToStatement(CustomCreditcardId.toStatement, CustomCreditcardId.parameterMetadata))), + NamedParameter("creditcardapprovalcode", ParameterValue(row.creditcardapprovalcode, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("currencyrateid", ParameterValue(row.currencyrateid, null, ToStatement.optionToStatement(CurrencyrateId.toStatement, CurrencyrateId.parameterMetadata))), + NamedParameter("subtotal", ParameterValue(row.subtotal, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("taxamt", ParameterValue(row.taxamt, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("freight", ParameterValue(row.freight, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("totaldue", ParameterValue(row.totaldue, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("comment", ParameterValue(row.comment, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + values ({salesorderid}::int4, {revisionnumber}::int2, {orderdate}::timestamp, {duedate}::timestamp, {shipdate}::timestamp, {status}::int2, {onlineorderflag}::bool, {purchaseordernumber}::varchar, {accountnumber}::varchar, {customerid}::int4, {salespersonid}::int4, {territoryid}::int4, {billtoaddressid}::int4, {shiptoaddressid}::int4, {shipmethodid}::int4, {creditcardid}::int4, {creditcardapprovalcode}, {currencyrateid}::int4, {subtotal}::numeric, {taxamt}::numeric, {freight}::numeric, {totaldue}::numeric, 
{comment}, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + "shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "revisionnumber", "orderdate"::text, "duedate"::text, "shipdate"::text, "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesorderheaderRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesorderheader_TEMP (like sales.salesorderheader) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesorderheader_TEMP("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderheaderRow.text, c): @nowarn + SQL"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + select * from salesorderheader_TEMP + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + 
"shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheader_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala index e72ac1386..ba7cbbc6a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala @@ -87,4 +87,17 @@ class SalesorderheaderRepoMock(toRow: Function1[SalesorderheaderRowUnsaved, Sale map.put(unsaved.salesorderid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesorderheaderRow])(implicit c: Connection): List[SalesorderheaderRow] = { + unsaved.map { row => + map += (row.salesorderid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.salesorderid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala index a180bb4b1..af2bcd495 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala @@ -29,4 +29,7 @@ trait SalesorderheadersalesreasonRepo { def update: UpdateBuilder[SalesorderheadersalesreasonFields, SalesorderheadersalesreasonRow] def update(row: SalesorderheadersalesreasonRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesorderheadersalesreasonRow)(implicit c: Connection): SalesorderheadersalesreasonRow + def upsertBatch(unsaved: Iterable[SalesorderheadersalesreasonRow])(implicit c: Connection): List[SalesorderheadersalesreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesorderheadersalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala index 31804a9eb..156b993da 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.sales.salesorderheader.SalesorderheaderId import adventureworks.sales.salesreason.SalesreasonId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -133,4 +135,40 @@ class SalesorderheadersalesreasonRepoImpl extends SalesorderheadersalesreasonRep .executeInsert(SalesorderheadersalesreasonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesorderheadersalesreasonRow])(implicit c: Connection): List[SalesorderheadersalesreasonRow] = { + def toNamedParameter(row: SalesorderheadersalesreasonRow): List[NamedParameter] = List( + NamedParameter("salesorderid", ParameterValue(row.salesorderid, null, SalesorderheaderId.toStatement)), + NamedParameter("salesreasonid", ParameterValue(row.salesreasonid, null, SalesreasonId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + values ({salesorderid}::int4, {salesreasonid}::int4, {modifieddate}::timestamp) + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "salesreasonid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesorderheadersalesreasonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
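for callers, a minimal illustrative sketch of driving this from plain JDBC (the connection URL and the rowsToUpsert value below are placeholders, not part of this change):

  import java.sql.{Connection, DriverManager}
  import adventureworks.sales.salesorderheadersalesreason.{SalesorderheadersalesreasonRepoImpl, SalesorderheadersalesreasonRow}

  val rowsToUpsert: List[SalesorderheadersalesreasonRow] = List.empty   // placeholder: the rows you want to upsert
  val c: Connection = DriverManager.getConnection("jdbc:postgresql://localhost/adventureworks")  // placeholder URL
  c.setAutoCommit(false)                                                // all three statements must share one transaction
  try {
    val result = (new SalesorderheadersalesreasonRepoImpl).upsertStreaming(rowsToUpsert.iterator, batchSize = 10000)(c)
    c.commit()
  } finally c.close()
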
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderheadersalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesorderheadersalesreason_TEMP (like sales.salesorderheadersalesreason) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesorderheadersalesreason_TEMP("salesorderid", "salesreasonid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderheadersalesreasonRow.text, c): @nowarn + SQL"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + select * from salesorderheadersalesreason_TEMP + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheadersalesreason_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala index 986b889e5..2e5885ff3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala @@ -87,4 +87,17 @@ class SalesorderheadersalesreasonRepoMock(toRow: Function1[Salesorderheadersales map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesorderheadersalesreasonRow])(implicit c: Connection): List[SalesorderheadersalesreasonRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderheadersalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala index 4b97eb12b..a4a6408d4 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala @@ -30,4 +30,7 @@ trait SalespersonRepo { def update: UpdateBuilder[SalespersonFields, SalespersonRow] def update(row: SalespersonRow)(implicit c: Connection): Boolean def upsert(unsaved: SalespersonRow)(implicit c: Connection): SalespersonRow + def upsertBatch(unsaved: Iterable[SalespersonRow])(implicit c: Connection): List[SalespersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalespersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala index 449aba6f3..cd62aa4c3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -171,4 +173,60 @@ class SalespersonRepoImpl extends SalespersonRepo { .executeInsert(SalespersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalespersonRow])(implicit c: Connection): List[SalespersonRow] = { + def toNamedParameter(row: SalespersonRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, ToStatement.optionToStatement(SalesterritoryId.toStatement, SalesterritoryId.parameterMetadata))), + NamedParameter("salesquota", ParameterValue(row.salesquota, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("bonus", ParameterValue(row.bonus, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("commissionpct", ParameterValue(row.commissionpct, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("salesytd", ParameterValue(row.salesytd, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("saleslastyear", ParameterValue(row.saleslastyear, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + values ({businessentityid}::int4, {territoryid}::int4, {salesquota}::numeric, {bonus}::numeric, {commissionpct}::numeric, {salesytd}::numeric, {saleslastyear}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "territoryid", 
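-- note: the "modifieddate" timestamp below is returned as ::text, presumably so the generated row parser can read it back as ISO text rather than through JDBC temporal types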
"salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalespersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalespersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesperson_TEMP (like sales.salesperson) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesperson_TEMP("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalespersonRow.text, c): @nowarn + SQL"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + select * from salesperson_TEMP + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesperson_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala index 0a057283e..e85ebc3e7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala @@ -88,4 +88,17 @@ class SalespersonRepoMock(toRow: Function1[SalespersonRowUnsaved, SalespersonRow map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalespersonRow])(implicit c: Connection): List[SalespersonRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalespersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala index 07ec8d4a0..ed542cb20 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala @@ -29,4 +29,7 @@ trait SalespersonquotahistoryRepo { def update: UpdateBuilder[SalespersonquotahistoryFields, SalespersonquotahistoryRow] def update(row: SalespersonquotahistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: SalespersonquotahistoryRow)(implicit c: Connection): SalespersonquotahistoryRow + def upsertBatch(unsaved: Iterable[SalespersonquotahistoryRow])(implicit c: Connection): List[SalespersonquotahistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalespersonquotahistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala index 722bcc9fe..ead9ed068 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -145,4 +147,46 @@ class SalespersonquotahistoryRepoImpl extends SalespersonquotahistoryRepo { .executeInsert(SalespersonquotahistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalespersonquotahistoryRow])(implicit c: Connection): List[SalespersonquotahistoryRow] = { + def toNamedParameter(row: SalespersonquotahistoryRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("quotadate", ParameterValue(row.quotadate, null, TypoLocalDateTime.toStatement)), + NamedParameter("salesquota", ParameterValue(row.salesquota, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest 
=> + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + values ({businessentityid}::int4, {quotadate}::timestamp, {salesquota}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "quotadate"::text, "salesquota", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalespersonquotahistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalespersonquotahistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salespersonquotahistory_TEMP (like sales.salespersonquotahistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salespersonquotahistory_TEMP("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalespersonquotahistoryRow.text, c): @nowarn + SQL"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + select * from salespersonquotahistory_TEMP + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salespersonquotahistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala index a16a8e84b..a48b8a6cc 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala @@ -87,4 +87,17 @@ class SalespersonquotahistoryRepoMock(toRow: Function1[SalespersonquotahistoryRo map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalespersonquotahistoryRow])(implicit c: Connection): List[SalespersonquotahistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalespersonquotahistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala index 16b7c307b..9bf894689 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala @@ -29,4 +29,7 @@ trait SalesreasonRepo { def update: UpdateBuilder[SalesreasonFields, SalesreasonRow] def update(row: SalesreasonRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesreasonRow)(implicit c: Connection): SalesreasonRow + def upsertBatch(unsaved: Iterable[SalesreasonRow])(implicit c: Connection): List[SalesreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala index 5cae0a888..29d790336 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala @@ -10,6 +10,7 @@ package salesreason import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -135,4 +137,45 @@ class SalesreasonRepoImpl extends SalesreasonRepo { .executeInsert(SalesreasonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesreasonRow])(implicit c: Connection): List[SalesreasonRow] = { + def toNamedParameter(row: SalesreasonRow): List[NamedParameter] = List( + NamedParameter("salesreasonid", ParameterValue(row.salesreasonid, null, SalesreasonId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("reasontype", ParameterValue(row.reasontype, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + values ({salesreasonid}::int4, {name}::varchar, {reasontype}::varchar, {modifieddate}::timestamp) + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesreasonid", "name", "reasontype", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + 
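/* the BatchSql above upserts every row through a single prepared insert ... on conflict do update executed as a JDBC batch; executeReturning below runs the batch and parses the rows from the returning clause with SalesreasonRow.rowParser, so the returned List reflects the values as they are now stored */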
).executeReturning(SalesreasonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesreason_TEMP (like sales.salesreason) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesreason_TEMP("salesreasonid", "name", "reasontype", "modifieddate") from stdin""", batchSize, unsaved)(SalesreasonRow.text, c): @nowarn + SQL"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + select * from salesreason_TEMP + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesreason_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala index a9926ec34..c318e40fa 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala @@ -87,4 +87,17 @@ class SalesreasonRepoMock(toRow: Function1[SalesreasonRowUnsaved, SalesreasonRow map.put(unsaved.salesreasonid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesreasonRow])(implicit c: Connection): List[SalesreasonRow] = { + unsaved.map { row => + map += (row.salesreasonid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.salesreasonid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala index cd72e6ef1..4bbc19adb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala @@ -29,4 +29,7 @@ trait SalestaxrateRepo { def update: UpdateBuilder[SalestaxrateFields, SalestaxrateRow] def update(row: SalestaxrateRow)(implicit c: Connection): Boolean def upsert(unsaved: SalestaxrateRow)(implicit c: Connection): SalestaxrateRow + def upsertBatch(unsaved: Iterable[SalestaxrateRow])(implicit c: Connection): List[SalestaxrateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalestaxrateRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala index b965bdfd2..03a0621a9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.stateprovince.StateprovinceId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -157,4 +159,54 @@ class SalestaxrateRepoImpl extends SalestaxrateRepo { .executeInsert(SalestaxrateRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalestaxrateRow])(implicit c: Connection): List[SalestaxrateRow] = { + def toNamedParameter(row: SalestaxrateRow): List[NamedParameter] = List( + NamedParameter("salestaxrateid", ParameterValue(row.salestaxrateid, null, SalestaxrateId.toStatement)), + NamedParameter("stateprovinceid", ParameterValue(row.stateprovinceid, null, StateprovinceId.toStatement)), + NamedParameter("taxtype", ParameterValue(row.taxtype, null, TypoShort.toStatement)), + NamedParameter("taxrate", ParameterValue(row.taxrate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + values ({salestaxrateid}::int4, {stateprovinceid}::int4, {taxtype}::int2, {taxrate}::numeric, {name}::varchar, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalestaxrateRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalestaxrateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salestaxrate_TEMP (like sales.salestaxrate) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salestaxrate_TEMP("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalestaxrateRow.text, c): @nowarn + SQL"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + select * from salestaxrate_TEMP + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salestaxrate_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala index 48ef8c5c1..2f0a9fc17 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala @@ -87,4 +87,17 @@ class SalestaxrateRepoMock(toRow: Function1[SalestaxrateRowUnsaved, Salestaxrate map.put(unsaved.salestaxrateid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalestaxrateRow])(implicit c: Connection): List[SalestaxrateRow] = { + unsaved.map { row => + map += (row.salestaxrateid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalestaxrateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.salestaxrateid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala index eac709b2f..a589bdd6f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala @@ -29,4 +29,7 @@ trait SalesterritoryRepo { def update: UpdateBuilder[SalesterritoryFields, SalesterritoryRow] def update(row: SalesterritoryRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesterritoryRow)(implicit c: Connection): SalesterritoryRow + def upsertBatch(unsaved: Iterable[SalesterritoryRow])(implicit c: Connection): List[SalesterritoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesterritoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala index 5174fa28b..05c48271b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.countryregion.CountryregionId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -177,4 +179,63 @@ class SalesterritoryRepoImpl extends SalesterritoryRepo { .executeInsert(SalesterritoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesterritoryRow])(implicit c: Connection): List[SalesterritoryRow] = { + def toNamedParameter(row: SalesterritoryRow): List[NamedParameter] = List( + NamedParameter("territoryid", ParameterValue(row.territoryid, null, SalesterritoryId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("countryregioncode", ParameterValue(row.countryregioncode, null, CountryregionId.toStatement)), + NamedParameter("group", ParameterValue(row.group, null, ToStatement.stringToStatement)), + NamedParameter("salesytd", ParameterValue(row.salesytd, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("saleslastyear", ParameterValue(row.saleslastyear, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("costytd", ParameterValue(row.costytd, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("costlastyear", ParameterValue(row.costlastyear, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + values ({territoryid}::int4, {name}::varchar, {countryregioncode}, {group}, {salesytd}::numeric, {saleslastyear}::numeric, {costytd}::numeric, {costlastyear}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", 
"costytd", "costlastyear", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesterritoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesterritoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesterritory_TEMP (like sales.salesterritory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesterritory_TEMP("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesterritoryRow.text, c): @nowarn + SQL"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + select * from salesterritory_TEMP + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala index 214dff433..aa2ee9e26 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala @@ -87,4 +87,17 @@ class SalesterritoryRepoMock(toRow: Function1[SalesterritoryRowUnsaved, Salester map.put(unsaved.territoryid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesterritoryRow])(implicit c: Connection): List[SalesterritoryRow] = { + unsaved.map { row => + map += (row.territoryid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesterritoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.territoryid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala index e1b55d569..18089225f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala @@ -29,4 +29,7 @@ trait SalesterritoryhistoryRepo { def update: UpdateBuilder[SalesterritoryhistoryFields, SalesterritoryhistoryRow] def update(row: SalesterritoryhistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesterritoryhistoryRow)(implicit c: Connection): SalesterritoryhistoryRow + def upsertBatch(unsaved: Iterable[SalesterritoryhistoryRow])(implicit c: Connection): List[SalesterritoryhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesterritoryhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala index 5d8edda1c..74cc47eb6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -150,4 +152,47 @@ class SalesterritoryhistoryRepoImpl extends SalesterritoryhistoryRepo { .executeInsert(SalesterritoryhistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesterritoryhistoryRow])(implicit c: Connection): List[SalesterritoryhistoryRow] = { + def toNamedParameter(row: SalesterritoryhistoryRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, SalesterritoryId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", 
ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + values ({businessentityid}::int4, {territoryid}::int4, {startdate}::timestamp, {enddate}::timestamp, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "territoryid", "startdate"::text, "enddate"::text, "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesterritoryhistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesterritoryhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesterritoryhistory_TEMP (like sales.salesterritoryhistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesterritoryhistory_TEMP("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesterritoryhistoryRow.text, c): @nowarn + SQL"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + select * from salesterritoryhistory_TEMP + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritoryhistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala index 4d90d0ac3..3163857d0 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala @@ -87,4 +87,17 @@ class SalesterritoryhistoryRepoMock(toRow: Function1[SalesterritoryhistoryRowUns map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesterritoryhistoryRow])(implicit c: Connection): List[SalesterritoryhistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesterritoryhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala index fd54f6963..521676b4d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala @@ -29,4 +29,7 @@ trait ShoppingcartitemRepo { def update: UpdateBuilder[ShoppingcartitemFields, ShoppingcartitemRow] def update(row: ShoppingcartitemRow)(implicit c: Connection): Boolean def upsert(unsaved: ShoppingcartitemRow)(implicit c: Connection): ShoppingcartitemRow + def upsertBatch(unsaved: Iterable[ShoppingcartitemRow])(implicit c: Connection): List[ShoppingcartitemRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ShoppingcartitemRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala index 43897887f..45632f36d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala @@ -10,6 +10,7 @@ package shoppingcartitem import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -150,4 +152,51 @@ class ShoppingcartitemRepoImpl extends ShoppingcartitemRepo { .executeInsert(ShoppingcartitemRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ShoppingcartitemRow])(implicit c: Connection): List[ShoppingcartitemRow] = { + def toNamedParameter(row: ShoppingcartitemRow): List[NamedParameter] = List( + NamedParameter("shoppingcartitemid", ParameterValue(row.shoppingcartitemid, null, ShoppingcartitemId.toStatement)), + NamedParameter("shoppingcartid", ParameterValue(row.shoppingcartid, null, ToStatement.stringToStatement)), + NamedParameter("quantity", ParameterValue(row.quantity, null, ToStatement.intToStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("datecreated", ParameterValue(row.datecreated, null, TypoLocalDateTime.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.shoppingcartitem("shoppingcartitemid", 
"shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + values ({shoppingcartitemid}::int4, {shoppingcartid}, {quantity}::int4, {productid}::int4, {datecreated}::timestamp, {modifieddate}::timestamp) + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + returning "shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated"::text, "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ShoppingcartitemRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShoppingcartitemRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table shoppingcartitem_TEMP (like sales.shoppingcartitem) on commit drop".execute(): @nowarn + streamingInsert(s"""copy shoppingcartitem_TEMP("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") from stdin""", batchSize, unsaved)(ShoppingcartitemRow.text, c): @nowarn + SQL"""insert into sales.shoppingcartitem("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + select * from shoppingcartitem_TEMP + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shoppingcartitem_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala index 9c5120481..6ac6f7838 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala @@ -87,4 +87,17 @@ class ShoppingcartitemRepoMock(toRow: Function1[ShoppingcartitemRowUnsaved, Shop map.put(unsaved.shoppingcartitemid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ShoppingcartitemRow])(implicit c: Connection): List[ShoppingcartitemRow] = { + unsaved.map { row => + map += (row.shoppingcartitemid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShoppingcartitemRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.shoppingcartitemid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala index f3af8fbdc..0f2b28cbe 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala @@ -29,4 +29,7 @@ trait SpecialofferRepo { def update: UpdateBuilder[SpecialofferFields, SpecialofferRow] def update(row: SpecialofferRow)(implicit c: Connection): Boolean def upsert(unsaved: SpecialofferRow)(implicit c: Connection): SpecialofferRow + def upsertBatch(unsaved: Iterable[SpecialofferRow])(implicit c: Connection): List[SpecialofferRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SpecialofferRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala index 08a94e9b4..56a8c0edb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala @@ -10,6 +10,7 @@ package specialoffer import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -174,4 +176,66 @@ class SpecialofferRepoImpl extends SpecialofferRepo { .executeInsert(SpecialofferRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SpecialofferRow])(implicit c: Connection): List[SpecialofferRow] = { + def toNamedParameter(row: SpecialofferRow): List[NamedParameter] = List( + NamedParameter("specialofferid", ParameterValue(row.specialofferid, null, SpecialofferId.toStatement)), + NamedParameter("description", ParameterValue(row.description, null, ToStatement.stringToStatement)), + NamedParameter("discountpct", ParameterValue(row.discountpct, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("type", ParameterValue(row.`type`, null, ToStatement.stringToStatement)), + NamedParameter("category", ParameterValue(row.category, null, ToStatement.stringToStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("minqty", ParameterValue(row.minqty, null, ToStatement.intToStatement)), + NamedParameter("maxqty", ParameterValue(row.maxqty, null, ToStatement.optionToStatement(ToStatement.intToStatement, 
ParameterMetaData.IntParameterMetaData))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + values ({specialofferid}::int4, {description}, {discountpct}::numeric, {type}, {category}, {startdate}::timestamp, {enddate}::timestamp, {minqty}::int4, {maxqty}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "specialofferid", "description", "discountpct", "type", "category", "startdate"::text, "enddate"::text, "minqty", "maxqty", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SpecialofferRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SpecialofferRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table specialoffer_TEMP (like sales.specialoffer) on commit drop".execute(): @nowarn + streamingInsert(s"""copy specialoffer_TEMP("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SpecialofferRow.text, c): @nowarn + SQL"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + select * from specialoffer_TEMP + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialoffer_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala index 7b2d8749c..a9ff1c23a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala @@ -87,4 +87,17 @@ class SpecialofferRepoMock(toRow: Function1[SpecialofferRowUnsaved, Specialoffer map.put(unsaved.specialofferid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SpecialofferRow])(implicit c: Connection): List[SpecialofferRow] = { + unsaved.map { row => + map += (row.specialofferid -> row) + row + 
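// A minimal usage sketch (not generated code) for the anorm upsertStreaming shown above, assuming the
// caller provides `rows: Iterator[SpecialofferRow]` and a plain JDBC Connection, with the adventureworks
// imports elided. Because the temporary table is created `on commit drop` and three statements must see
// it, auto-commit has to be disabled around the call, as the NOTE in the generated code warns.
def streamSpecialoffers(rows: Iterator[SpecialofferRow])(implicit c: java.sql.Connection): Int = {
  c.setAutoCommit(false)
  try {
    val n = (new SpecialofferRepoImpl).upsertStreaming(rows, batchSize = 10000)
    c.commit() // the commit makes the upserts visible and drops the temporary table
    n
  } catch { case t: Throwable => c.rollback(); throw t }
}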
}.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SpecialofferRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.specialofferid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala index 3eb44c9bf..b2ee0e0b3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala @@ -29,4 +29,7 @@ trait SpecialofferproductRepo { def update: UpdateBuilder[SpecialofferproductFields, SpecialofferproductRow] def update(row: SpecialofferproductRow)(implicit c: Connection): Boolean def upsert(unsaved: SpecialofferproductRow)(implicit c: Connection): SpecialofferproductRow + def upsertBatch(unsaved: Iterable[SpecialofferproductRow])(implicit c: Connection): List[SpecialofferproductRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SpecialofferproductRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala index 79b327f68..f89f9a814 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.production.product.ProductId import adventureworks.sales.specialoffer.SpecialofferId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,43 @@ class SpecialofferproductRepoImpl extends SpecialofferproductRepo { .executeInsert(SpecialofferproductRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SpecialofferproductRow])(implicit c: Connection): List[SpecialofferproductRow] = { + def toNamedParameter(row: SpecialofferproductRow): List[NamedParameter] = List( + NamedParameter("specialofferid", ParameterValue(row.specialofferid, null, SpecialofferId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", "modifieddate") 
+ values ({specialofferid}::int4, {productid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "specialofferid", "productid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SpecialofferproductRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SpecialofferproductRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table specialofferproduct_TEMP (like sales.specialofferproduct) on commit drop".execute(): @nowarn + streamingInsert(s"""copy specialofferproduct_TEMP("specialofferid", "productid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SpecialofferproductRow.text, c): @nowarn + SQL"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", "modifieddate") + select * from specialofferproduct_TEMP + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialofferproduct_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala index 3b4b30e62..72eb67df5 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala @@ -87,4 +87,17 @@ class SpecialofferproductRepoMock(toRow: Function1[SpecialofferproductRowUnsaved map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SpecialofferproductRow])(implicit c: Connection): List[SpecialofferproductRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SpecialofferproductRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala index b8432187b..403d3e978 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala @@ -30,4 +30,7 @@ trait StoreRepo { def update: UpdateBuilder[StoreFields, StoreRow] def update(row: StoreRow)(implicit c: Connection): Boolean def upsert(unsaved: StoreRow)(implicit c: Connection): StoreRow + def upsertBatch(unsaved: Iterable[StoreRow])(implicit c: Connection): List[StoreRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[StoreRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala index 574da19b8..bfdaf83c5 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import adventureworks.customtypes.TypoXml import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -147,4 +149,51 @@ class StoreRepoImpl extends StoreRepo { .executeInsert(StoreRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[StoreRow])(implicit c: Connection): List[StoreRow] = { + def toNamedParameter(row: StoreRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("salespersonid", ParameterValue(row.salespersonid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("demographics", ParameterValue(row.demographics, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + values ({businessentityid}::int4, {name}::varchar, {salespersonid}::int4, {demographics}::xml, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(StoreRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[StoreRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table store_TEMP (like sales.store) on commit drop".execute(): @nowarn + streamingInsert(s"""copy store_TEMP("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(StoreRow.text, c): @nowarn + SQL"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + select * from store_TEMP + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table store_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala index 4a992b8b4..545c28a5b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala @@ -88,4 +88,17 @@ class StoreRepoMock(toRow: Function1[StoreRowUnsaved, StoreRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[StoreRow])(implicit c: Connection): List[StoreRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[StoreRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/anorm/adventureworks/ExecuteReturningSyntax.scala b/typo-tester-anorm/generated-and-checked-in/anorm/adventureworks/ExecuteReturningSyntax.scala new file mode 100644 index 000000000..a52231727 --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/anorm/adventureworks/ExecuteReturningSyntax.scala @@ -0,0 +1,28 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. + */ +package anorm +package adventureworks + +import java.sql.Connection +import resource.managed + +object ExecuteReturningSyntax { + /* add executeReturning to anorm. 
it needs to be inside the package, because everything is hidden */ + implicit class Ops(batchSql: BatchSql) { + def executeReturning[T](parser: ResultSetParser[T])(implicit c: Connection): T = + managed(batchSql.getFilledStatement(c, getGeneratedKeys = true))(using StatementResource, statementClassTag).acquireAndGet { ps => + ps.executeBatch() + Sql + .asTry( + parser, + managed(ps.getGeneratedKeys)(using ResultSetResource, resultSetClassTag), + onFirstRow = false, + ColumnAliaser.empty + ) + .get + } + } +} diff --git a/typo-tester-anorm/src/scala/adventureworks/production/product/RepoTest.scala b/typo-tester-anorm/src/scala/adventureworks/production/product/RepoTest.scala new file mode 100644 index 000000000..53e94f068 --- /dev/null +++ b/typo-tester-anorm/src/scala/adventureworks/production/product/RepoTest.scala @@ -0,0 +1,43 @@ +package adventureworks.production.product + +import adventureworks.customtypes.* +import adventureworks.production.unitmeasure.* +import adventureworks.public.Name +import adventureworks.{SnapshotTest, withConnection} +import org.scalatest.Assertion + +import scala.annotation.nowarn + +class RepoTest extends SnapshotTest { + def upsertStreaming(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { implicit c => + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + unitmeasureRepo.upsertStreaming(Iterator(um1, um2)): @nowarn + assert(List(um1, um2) == unitmeasureRepo.selectAll.sortBy(_.name)): @nowarn + val um1a = um1.copy(name = Name("name1a")) + val um2a = um2.copy(name = Name("name2a")) + unitmeasureRepo.upsertStreaming(Iterator(um1a, um2a)): @nowarn + assert(List(um1a, um2a) == unitmeasureRepo.selectAll.sortBy(_.name)) + } + + def upsertBatch(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { implicit c => + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + val initial = unitmeasureRepo.upsertBatch(List(um1, um2)) + assert(List(um1, um2) == initial.sortBy(_.name)): @nowarn + val um1a = um1.copy(name = Name("name1a")) + val um2a = um2.copy(name = Name("name2a")) + val returned = unitmeasureRepo.upsertBatch(List(um1a, um2a)) + assert(List(um1a, um2a) == returned.sortBy(_.name)): @nowarn + val all = unitmeasureRepo.selectAll + assert(List(um1a, um2a) == all.sortBy(_.name)) + } + + test("upsertStreaming in-memory")(upsertStreaming(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now)))) + test("upsertStreaming pg")(upsertStreaming(new UnitmeasureRepoImpl)) + + test("upsertBatch in-memory")(upsertBatch(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now)))) + test("upsertBatch pg")(upsertBatch(new UnitmeasureRepoImpl)) +} diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala index a0ad086c7..69cbb1301 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala @@ -30,4 +30,7 @@ trait DepartmentRepo { def update: UpdateBuilder[DepartmentFields, 
DepartmentRow] def update(row: DepartmentRow): ConnectionIO[Boolean] def upsert(unsaved: DepartmentRow): ConnectionIO[DepartmentRow] + def upsertBatch(unsaved: List[DepartmentRow]): Stream[ConnectionIO, DepartmentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, DepartmentRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala index 1a287e753..87042d6bb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala @@ -10,12 +10,14 @@ package department import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -121,4 +123,33 @@ class DepartmentRepoImpl extends DepartmentRepo { returning "departmentid", "name", "groupname", "modifieddate"::text """.query(using DepartmentRow.read).unique } + override def upsertBatch(unsaved: List[DepartmentRow]): Stream[ConnectionIO, DepartmentRow] = { + Update[DepartmentRow]( + s"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + values (?::int4,?::varchar,?::varchar,?::timestamp) + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + returning "departmentid", "name", "groupname", "modifieddate"::text""" + )(using DepartmentRow.write) + .updateManyWithGeneratedKeys[DepartmentRow]("departmentid", "name", "groupname", "modifieddate")(unsaved)(using catsStdInstancesForList, DepartmentRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, DepartmentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table department_TEMP (like humanresources.department) on commit drop".update.run + _ <- new FragmentOps(sql"""copy department_TEMP("departmentid", "name", "groupname", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using DepartmentRow.text) + res <- sql"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + select * from department_TEMP + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table department_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala index 49e4bbf5e..15d1de65d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala @@ -105,4 +105,23 @@ class DepartmentRepoMock(toRow: Function1[DepartmentRowUnsaved, DepartmentRow], unsaved } } + override def upsertBatch(unsaved: List[DepartmentRow]): Stream[ConnectionIO, DepartmentRow] = { + Stream.emits { + unsaved.map { row => + map += (row.departmentid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, DepartmentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.departmentid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRow.scala index 179b76cb6..04a749721 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -62,4 +63,23 @@ object DepartmentRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[DepartmentRow] = new Write[DepartmentRow]( + puts = List((DepartmentId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.departmentid, x.name, x.groupname, x.modifieddate), + unsafeSet = (rs, i, a) => { + DepartmentId.put.unsafeSetNonNullable(rs, i + 0, a.departmentid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Name.put.unsafeSetNonNullable(rs, i + 2, a.groupname) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + DepartmentId.put.unsafeUpdateNonNullable(ps, i + 0, a.departmentid) + 
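// A minimal usage sketch (not generated code) for the doobie upsertStreaming shown above, assuming a
// Transactor[IO] named `xa` and the generated DepartmentRepoImpl/DepartmentRow from this diff, with the
// adventureworks imports elided. Running everything through `transact` gives the single transaction that
// the temp-table / COPY / insert-and-drop sequence requires.
import cats.effect.IO
import doobie.Transactor
import doobie.implicits.*
import fs2.Stream

def streamDepartments(rows: List[DepartmentRow], xa: Transactor[IO]): IO[Int] =
  // Stream.emits lifts the in-memory rows; covariance lets the pure stream stand in for Stream[ConnectionIO, DepartmentRow]
  (new DepartmentRepoImpl).upsertStreaming(Stream.emits(rows), batchSize = 10000).transact(xa)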
Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Name.put.unsafeUpdateNonNullable(ps, i + 2, a.groupname) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala index 0a7187f89..3dd3d9fde 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala @@ -31,4 +31,7 @@ trait EmployeeRepo { def update: UpdateBuilder[EmployeeFields, EmployeeRow] def update(row: EmployeeRow): ConnectionIO[Boolean] def upsert(unsaved: EmployeeRow): ConnectionIO[EmployeeRow] + def upsertBatch(unsaved: List[EmployeeRow]): Stream[ConnectionIO, EmployeeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeeRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala index f92c80757..75b3b70e9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Flag +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -185,4 +187,55 @@ class EmployeeRepoImpl extends EmployeeRepo { returning "businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate"::text, "maritalstatus", "gender", "hiredate"::text, "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate"::text, "organizationnode" """.query(using EmployeeRow.read).unique } + override def upsertBatch(unsaved: List[EmployeeRow]): Stream[ConnectionIO, EmployeeRow] = { + Update[EmployeeRow]( + s"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + values (?::int4,?,?,?,?::date,?::bpchar,?::bpchar,?::date,?::bool,?::int2,?::int2,?::bool,?::uuid,?::timestamp,?) 
+ on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + returning "businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate"::text, "maritalstatus", "gender", "hiredate"::text, "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate"::text, "organizationnode"""" + )(using EmployeeRow.write) + .updateManyWithGeneratedKeys[EmployeeRow]("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode")(unsaved)(using catsStdInstancesForList, EmployeeRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table employee_TEMP (like humanresources.employee) on commit drop".update.run + _ <- new FragmentOps(sql"""copy employee_TEMP("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") from stdin""").copyIn(unsaved, batchSize)(using EmployeeRow.text) + res <- sql"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + select * from employee_TEMP + on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + ; + drop table employee_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala index 9a044c1ed..a421b6f15 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala @@ -106,4 +106,23 @@ class EmployeeRepoMock(toRow: Function1[EmployeeRowUnsaved, EmployeeRow], unsaved } } + override def 
upsertBatch(unsaved: List[EmployeeRow]): Stream[ConnectionIO, EmployeeRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRow.scala index 5b20dee8c..511735f8d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRow.scala @@ -17,6 +17,7 @@ import adventureworks.public.Flag import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -144,4 +145,56 @@ object EmployeeRow { sb.append(Text.DELIMETER) Text.option(Text.stringInstance).unsafeEncode(row.organizationnode, sb) } + implicit lazy val write: Write[EmployeeRow] = new Write[EmployeeRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable)), + toList = x => List(x.businessentityid, x.nationalidnumber, x.loginid, x.jobtitle, x.birthdate, x.maritalstatus, x.gender, x.hiredate, x.salariedflag, x.vacationhours, x.sickleavehours, x.currentflag, x.rowguid, x.modifieddate, x.organizationnode), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.nationalidnumber) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.loginid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.jobtitle) + TypoLocalDate.put.unsafeSetNonNullable(rs, i + 4, a.birthdate) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.maritalstatus) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 6, a.gender) + TypoLocalDate.put.unsafeSetNonNullable(rs, i + 7, a.hiredate) + Flag.put.unsafeSetNonNullable(rs, i + 8, a.salariedflag) + TypoShort.put.unsafeSetNonNullable(rs, i + 9, a.vacationhours) + TypoShort.put.unsafeSetNonNullable(rs, i + 10, a.sickleavehours) + Flag.put.unsafeSetNonNullable(rs, i + 11, a.currentflag) + TypoUUID.put.unsafeSetNonNullable(rs, i + 12, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 13, a.modifieddate) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 14, a.organizationnode) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + 
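// A minimal sketch (not generated code) of consuming the Stream returned by the doobie upsertBatch above,
// assuming `employees: List[EmployeeRow]` is supplied by the caller and the usual doobie implicits are in
// scope. Compiling the stream back to a List mirrors what the generated mocks in this diff do with
// `unsaved.compile.toList`.
import doobie.free.connection.ConnectionIO

def upsertEmployees(employees: List[EmployeeRow]): ConnectionIO[List[EmployeeRow]] =
  // updateManyWithGeneratedKeys emits each inserted-or-updated row as it comes back from the `returning` clause
  (new EmployeeRepoImpl).upsertBatch(employees).compile.toList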
Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.nationalidnumber) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.loginid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.jobtitle) + TypoLocalDate.put.unsafeUpdateNonNullable(ps, i + 4, a.birthdate) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.maritalstatus) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.gender) + TypoLocalDate.put.unsafeUpdateNonNullable(ps, i + 7, a.hiredate) + Flag.put.unsafeUpdateNonNullable(ps, i + 8, a.salariedflag) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 9, a.vacationhours) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 10, a.sickleavehours) + Flag.put.unsafeUpdateNonNullable(ps, i + 11, a.currentflag) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 12, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 13, a.modifieddate) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 14, a.organizationnode) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala index 3b99c7ec5..17fb8442b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala @@ -30,4 +30,7 @@ trait EmployeedepartmenthistoryRepo { def update: UpdateBuilder[EmployeedepartmenthistoryFields, EmployeedepartmenthistoryRow] def update(row: EmployeedepartmenthistoryRow): ConnectionIO[Boolean] def upsert(unsaved: EmployeedepartmenthistoryRow): ConnectionIO[EmployeedepartmenthistoryRow] + def upsertBatch(unsaved: List[EmployeedepartmenthistoryRow]): Stream[ConnectionIO, EmployeedepartmenthistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala index 567b7c1be..4ad24020e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala @@ -13,12 +13,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.humanresources.department.DepartmentId import adventureworks.humanresources.shift.ShiftId import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -141,4 +143,31 @@ class EmployeedepartmenthistoryRepoImpl extends EmployeedepartmenthistoryRepo { returning "businessentityid", "departmentid", "shiftid", "startdate"::text, "enddate"::text, "modifieddate"::text """.query(using EmployeedepartmenthistoryRow.read).unique } + override def upsertBatch(unsaved: List[EmployeedepartmenthistoryRow]): Stream[ConnectionIO, EmployeedepartmenthistoryRow] = { + Update[EmployeedepartmenthistoryRow]( + s"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + values (?::int4,?::int2,?::int2,?::date,?::date,?::timestamp) + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "departmentid", "shiftid", "startdate"::text, "enddate"::text, "modifieddate"::text""" + )(using EmployeedepartmenthistoryRow.write) + .updateManyWithGeneratedKeys[EmployeedepartmenthistoryRow]("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate")(unsaved)(using catsStdInstancesForList, EmployeedepartmenthistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table employeedepartmenthistory_TEMP (like humanresources.employeedepartmenthistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy employeedepartmenthistory_TEMP("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using EmployeedepartmenthistoryRow.text) + res <- sql"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + select * from employeedepartmenthistory_TEMP + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeedepartmenthistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala index ae958486d..a223708a8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala @@ -105,4 +105,23 @@ class EmployeedepartmenthistoryRepoMock(toRow: Function1[Employeedepartmenthisto unsaved } } + override def upsertBatch(unsaved: List[EmployeedepartmenthistoryRow]): Stream[ConnectionIO, EmployeedepartmenthistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRow.scala index 84bceb6f1..ffd88e5fd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRow.scala @@ -16,6 +16,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -84,4 +85,29 @@ object EmployeedepartmenthistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[EmployeedepartmenthistoryRow] = new Write[EmployeedepartmenthistoryRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (DepartmentId.put, Nullability.NoNulls), + (ShiftId.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.departmentid, x.shiftid, x.startdate, x.enddate, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + DepartmentId.put.unsafeSetNonNullable(rs, i + 1, a.departmentid) + ShiftId.put.unsafeSetNonNullable(rs, i + 2, a.shiftid) + TypoLocalDate.put.unsafeSetNonNullable(rs, i + 3, a.startdate) + TypoLocalDate.put.unsafeSetNullable(rs, i + 4, a.enddate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + DepartmentId.put.unsafeUpdateNonNullable(ps, i + 1, a.departmentid) + ShiftId.put.unsafeUpdateNonNullable(ps, i + 2, a.shiftid) + TypoLocalDate.put.unsafeUpdateNonNullable(ps, i + 3, a.startdate) + TypoLocalDate.put.unsafeUpdateNullable(ps, i + 4, a.enddate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala index 4da02bcb1..9d64f2793 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala @@ -30,4 +30,7 @@ trait EmployeepayhistoryRepo { def update: UpdateBuilder[EmployeepayhistoryFields, EmployeepayhistoryRow] def update(row: EmployeepayhistoryRow): ConnectionIO[Boolean] def upsert(unsaved: EmployeepayhistoryRow): ConnectionIO[EmployeepayhistoryRow] + def upsertBatch(unsaved: 
List[EmployeepayhistoryRow]): Stream[ConnectionIO, EmployeepayhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeepayhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala index e4d294c94..266edb8d1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -136,4 +138,33 @@ class EmployeepayhistoryRepoImpl extends EmployeepayhistoryRepo { returning "businessentityid", "ratechangedate"::text, "rate", "payfrequency", "modifieddate"::text """.query(using EmployeepayhistoryRow.read).unique } + override def upsertBatch(unsaved: List[EmployeepayhistoryRow]): Stream[ConnectionIO, EmployeepayhistoryRow] = { + Update[EmployeepayhistoryRow]( + s"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + values (?::int4,?::timestamp,?::numeric,?::int2,?::timestamp) + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "ratechangedate"::text, "rate", "payfrequency", "modifieddate"::text""" + )(using EmployeepayhistoryRow.write) + .updateManyWithGeneratedKeys[EmployeepayhistoryRow]("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate")(unsaved)(using catsStdInstancesForList, EmployeepayhistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeepayhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table employeepayhistory_TEMP (like humanresources.employeepayhistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy employeepayhistory_TEMP("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using EmployeepayhistoryRow.text) + res <- sql"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + select * from employeepayhistory_TEMP + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeepayhistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala index b00f33f63..b8ea23b61 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala @@ -105,4 +105,23 @@ class EmployeepayhistoryRepoMock(toRow: Function1[EmployeepayhistoryRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[EmployeepayhistoryRow]): Stream[ConnectionIO, EmployeepayhistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeepayhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRow.scala index 9aee5dc5e..a1513bb79 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -75,4 +76,26 @@ object EmployeepayhistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[EmployeepayhistoryRow] = new Write[EmployeepayhistoryRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.ratechangedate, x.rate, x.payfrequency, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.ratechangedate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.rate) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.payfrequency) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.ratechangedate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.rate) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.payfrequency) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala index 2855a8944..8d868805c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala @@ -30,4 +30,7 @@ trait JobcandidateRepo { def update: UpdateBuilder[JobcandidateFields, JobcandidateRow] def update(row: JobcandidateRow): ConnectionIO[Boolean] def upsert(unsaved: JobcandidateRow): ConnectionIO[JobcandidateRow] + def upsertBatch(unsaved: List[JobcandidateRow]): Stream[ConnectionIO, JobcandidateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, JobcandidateRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala index 5284c26e6..fce1440ca 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoXml import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -122,4 +124,33 @@ class JobcandidateRepoImpl extends JobcandidateRepo { returning "jobcandidateid", "businessentityid", "resume", "modifieddate"::text """.query(using JobcandidateRow.read).unique } + override def upsertBatch(unsaved: List[JobcandidateRow]): Stream[ConnectionIO, JobcandidateRow] = { + Update[JobcandidateRow]( + s"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + values (?::int4,?::int4,?::xml,?::timestamp) + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + returning "jobcandidateid", "businessentityid", "resume", "modifieddate"::text""" + )(using JobcandidateRow.write) + .updateManyWithGeneratedKeys[JobcandidateRow]("jobcandidateid", "businessentityid", "resume", "modifieddate")(unsaved)(using catsStdInstancesForList, JobcandidateRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, JobcandidateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table jobcandidate_TEMP (like humanresources.jobcandidate) on commit drop".update.run + _ <- new FragmentOps(sql"""copy jobcandidate_TEMP("jobcandidateid", "businessentityid", "resume", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using JobcandidateRow.text) + res <- sql"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + select * from jobcandidate_TEMP + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table jobcandidate_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala index 79e5ba231..4b8aa2132 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala @@ -105,4 +105,23 @@ class JobcandidateRepoMock(toRow: Function1[JobcandidateRowUnsaved, Jobcandidate unsaved } } + override def upsertBatch(unsaved: List[JobcandidateRow]): Stream[ConnectionIO, JobcandidateRow] = { + Stream.emits { + unsaved.map { row => + map += (row.jobcandidateid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, JobcandidateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.jobcandidateid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRow.scala index 7ac24bdf9..6ad7150ec 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -64,4 +65,23 @@ object JobcandidateRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[JobcandidateRow] = new Write[JobcandidateRow]( + puts = List((JobcandidateId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.jobcandidateid, x.businessentityid, x.resume, x.modifieddate), + unsafeSet = (rs, i, a) => { + JobcandidateId.put.unsafeSetNonNullable(rs, i + 0, a.jobcandidateid) + BusinessentityId.put.unsafeSetNullable(rs, i + 1, a.businessentityid) + TypoXml.put.unsafeSetNullable(rs, i + 2, a.resume) + 
TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + JobcandidateId.put.unsafeUpdateNonNullable(ps, i + 0, a.jobcandidateid) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 1, a.businessentityid) + TypoXml.put.unsafeUpdateNullable(ps, i + 2, a.resume) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala index 36648c9c8..705fb9806 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala @@ -30,4 +30,7 @@ trait ShiftRepo { def update: UpdateBuilder[ShiftFields, ShiftRow] def update(row: ShiftRow): ConnectionIO[Boolean] def upsert(unsaved: ShiftRow): ConnectionIO[ShiftRow] + def upsertBatch(unsaved: List[ShiftRow]): Stream[ConnectionIO, ShiftRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ShiftRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala index 57e5bfaf9..6ca1d52f9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoLocalTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -126,4 +128,35 @@ class ShiftRepoImpl extends ShiftRepo { returning "shiftid", "name", "starttime"::text, "endtime"::text, "modifieddate"::text """.query(using ShiftRow.read).unique } + override def upsertBatch(unsaved: List[ShiftRow]): Stream[ConnectionIO, ShiftRow] = { + Update[ShiftRow]( + s"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + values (?::int4,?::varchar,?::time,?::time,?::timestamp) + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + returning "shiftid", "name", "starttime"::text, "endtime"::text, "modifieddate"::text""" + )(using ShiftRow.write) + .updateManyWithGeneratedKeys[ShiftRow]("shiftid", "name", "starttime", "endtime", "modifieddate")(unsaved)(using catsStdInstancesForList, ShiftRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShiftRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table shift_TEMP (like humanresources.shift) on commit drop".update.run + _ <- new FragmentOps(sql"""copy shift_TEMP("shiftid", "name", "starttime", "endtime", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ShiftRow.text) + res <- sql"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + select * from shift_TEMP + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shift_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala index 7fdca63f1..e12fb39c8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala @@ -105,4 +105,23 @@ class ShiftRepoMock(toRow: Function1[ShiftRowUnsaved, ShiftRow], unsaved } } + override def upsertBatch(unsaved: List[ShiftRow]): Stream[ConnectionIO, ShiftRow] = { + Stream.emits { + unsaved.map { row => + map += (row.shiftid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShiftRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.shiftid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRow.scala index 62f47349b..f3fd65c89 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -69,4 +70,26 @@ object ShiftRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ShiftRow] = new Write[ShiftRow]( + puts = List((ShiftId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalTime.put, Nullability.NoNulls), + (TypoLocalTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.shiftid, x.name, x.starttime, x.endtime, x.modifieddate), + unsafeSet = (rs, i, a) => { + ShiftId.put.unsafeSetNonNullable(rs, i + 0, a.shiftid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalTime.put.unsafeSetNonNullable(rs, i + 2, a.starttime) + TypoLocalTime.put.unsafeSetNonNullable(rs, i + 3, a.endtime) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ShiftId.put.unsafeUpdateNonNullable(ps, i + 0, a.shiftid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, 
a.name) + TypoLocalTime.put.unsafeUpdateNonNullable(ps, i + 2, a.starttime) + TypoLocalTime.put.unsafeUpdateNonNullable(ps, i + 3, a.endtime) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala new file mode 100644 index 000000000..09a489e76 --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. + */ +package adventureworks +package information_schema + +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import doobie.util.meta.Meta +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.cardinal_number` + * Constraint: CHECK ((VALUE >= 0)) + */ +case class CardinalNumber(value: Int) +object CardinalNumber { + implicit lazy val arrayGet: Get[Array[CardinalNumber]] = adventureworks.IntegerArrayMeta.get.map(_.map(CardinalNumber.apply)) + implicit lazy val arrayPut: Put[Array[CardinalNumber]] = adventureworks.IntegerArrayMeta.put.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CardinalNumber, Int] = Bijection[CardinalNumber, Int](_.value)(CardinalNumber.apply) + implicit lazy val decoder: Decoder[CardinalNumber] = Decoder.decodeInt.map(CardinalNumber.apply) + implicit lazy val encoder: Encoder[CardinalNumber] = Encoder.encodeInt.contramap(_.value) + implicit lazy val get: Get[CardinalNumber] = Meta.IntMeta.get.map(CardinalNumber.apply) + implicit lazy val ordering: Ordering[CardinalNumber] = Ordering.by(_.value) + implicit lazy val put: Put[CardinalNumber] = Meta.IntMeta.put.contramap(_.value) + implicit lazy val text: Text[CardinalNumber] = new Text[CardinalNumber] { + override def unsafeEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala new file mode 100644 index 000000000..b3e025d1c --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
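[editor's note] As a brief, assumed usage note for the new information_schema domain wrappers such as CardinalNumber above: each wrapper just maps the underlying primitive's Get/Put/Text instances, so it can be used directly as a column type in doobie queries. The query below is illustrative only, not part of the generated sources.

// Hypothetical query returning the generated CardinalNumber wrapper; relies only on its Get instance.
import adventureworks.information_schema.CardinalNumber
import doobie.free.connection.ConnectionIO
import doobie.implicits._

object CardinalNumberUsageSketch {
  val columnCount: ConnectionIO[CardinalNumber] =
    sql"select count(*)::int4 from information_schema.columns".query[CardinalNumber].unique
}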
+ */ +package adventureworks +package information_schema + +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import doobie.util.meta.Meta +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.character_data` + * No constraint + */ +case class CharacterData(value: String) +object CharacterData { + implicit lazy val arrayGet: Get[Array[CharacterData]] = adventureworks.StringArrayMeta.get.map(_.map(CharacterData.apply)) + implicit lazy val arrayPut: Put[Array[CharacterData]] = adventureworks.StringArrayMeta.put.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CharacterData, String] = Bijection[CharacterData, String](_.value)(CharacterData.apply) + implicit lazy val decoder: Decoder[CharacterData] = Decoder.decodeString.map(CharacterData.apply) + implicit lazy val encoder: Encoder[CharacterData] = Encoder.encodeString.contramap(_.value) + implicit lazy val get: Get[CharacterData] = Meta.StringMeta.get.map(CharacterData.apply) + implicit lazy val ordering: Ordering[CharacterData] = Ordering.by(_.value) + implicit lazy val put: Put[CharacterData] = Meta.StringMeta.put.contramap(_.value) + implicit lazy val text: Text[CharacterData] = new Text[CharacterData] { + override def unsafeEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala new file mode 100644 index 000000000..ece5efcd6 --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import doobie.util.meta.Meta +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.sql_identifier` + * No constraint + */ +case class SqlIdentifier(value: String) +object SqlIdentifier { + implicit lazy val arrayGet: Get[Array[SqlIdentifier]] = adventureworks.StringArrayMeta.get.map(_.map(SqlIdentifier.apply)) + implicit lazy val arrayPut: Put[Array[SqlIdentifier]] = adventureworks.StringArrayMeta.put.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[SqlIdentifier, String] = Bijection[SqlIdentifier, String](_.value)(SqlIdentifier.apply) + implicit lazy val decoder: Decoder[SqlIdentifier] = Decoder.decodeString.map(SqlIdentifier.apply) + implicit lazy val encoder: Encoder[SqlIdentifier] = Encoder.encodeString.contramap(_.value) + implicit lazy val get: Get[SqlIdentifier] = Meta.StringMeta.get.map(SqlIdentifier.apply) + implicit lazy val ordering: Ordering[SqlIdentifier] = Ordering.by(_.value) + implicit lazy val put: Put[SqlIdentifier] = Meta.StringMeta.put.contramap(_.value) + implicit lazy val text: Text[SqlIdentifier] = new Text[SqlIdentifier] { + override def unsafeEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala new file mode 100644 index 000000000..63904efc4 --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import adventureworks.customtypes.TypoInstant +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.time_stamp` + * No constraint + */ +case class TimeStamp(value: TypoInstant) +object TimeStamp { + implicit lazy val arrayGet: Get[Array[TimeStamp]] = TypoInstant.arrayGet.map(_.map(TimeStamp.apply)) + implicit lazy val arrayPut: Put[Array[TimeStamp]] = TypoInstant.arrayPut.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[TimeStamp, TypoInstant] = Bijection[TimeStamp, TypoInstant](_.value)(TimeStamp.apply) + implicit lazy val decoder: Decoder[TimeStamp] = TypoInstant.decoder.map(TimeStamp.apply) + implicit lazy val encoder: Encoder[TimeStamp] = TypoInstant.encoder.contramap(_.value) + implicit lazy val get: Get[TimeStamp] = TypoInstant.get.map(TimeStamp.apply) + implicit def ordering(implicit O0: Ordering[TypoInstant]): Ordering[TimeStamp] = Ordering.by(_.value) + implicit lazy val put: Put[TimeStamp] = TypoInstant.put.contramap(_.value) + implicit lazy val text: Text[TimeStamp] = new Text[TimeStamp] { + override def unsafeEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala new file mode 100644 index 000000000..caa7bddda --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import doobie.util.meta.Meta +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.yes_or_no` + * Constraint: CHECK (((VALUE)::text = ANY ((ARRAY['YES'::character varying, 'NO'::character varying])::text[]))) + */ +case class YesOrNo(value: String) +object YesOrNo { + implicit lazy val arrayGet: Get[Array[YesOrNo]] = adventureworks.StringArrayMeta.get.map(_.map(YesOrNo.apply)) + implicit lazy val arrayPut: Put[Array[YesOrNo]] = adventureworks.StringArrayMeta.put.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[YesOrNo, String] = Bijection[YesOrNo, String](_.value)(YesOrNo.apply) + implicit lazy val decoder: Decoder[YesOrNo] = Decoder.decodeString.map(YesOrNo.apply) + implicit lazy val encoder: Encoder[YesOrNo] = Encoder.encodeString.contramap(_.value) + implicit lazy val get: Get[YesOrNo] = Meta.StringMeta.get.map(YesOrNo.apply) + implicit lazy val ordering: Ordering[YesOrNo] = Ordering.by(_.value) + implicit lazy val put: Put[YesOrNo] = Meta.StringMeta.put.contramap(_.value) + implicit lazy val text: Text[YesOrNo] = new Text[YesOrNo] { + override def unsafeEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala index 6489fe2c9..9e52b7795 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala @@ -30,4 +30,7 @@ trait AddressRepo { def update: UpdateBuilder[AddressFields, AddressRow] def update(row: AddressRow): ConnectionIO[Boolean] def upsert(unsaved: AddressRow): ConnectionIO[AddressRow] + def upsertBatch(unsaved: List[AddressRow]): Stream[ConnectionIO, AddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, AddressRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala index 68ecf92e2..fd489d8c5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoBytea import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.stateprovince.StateprovinceId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -147,4 +149,43 @@ class AddressRepoImpl extends AddressRepo { returning "addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate"::text """.query(using AddressRow.read).unique } + override def upsertBatch(unsaved: List[AddressRow]): Stream[ConnectionIO, AddressRow] = { + Update[AddressRow]( + s"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + values (?::int4,?,?,?,?::int4,?,?::bytea,?::uuid,?::timestamp) + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate"::text""" + )(using AddressRow.write) + .updateManyWithGeneratedKeys[AddressRow]("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, AddressRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, AddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table address_TEMP (like person.address) on commit drop".update.run + _ <- new FragmentOps(sql"""copy address_TEMP("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using AddressRow.text) + res <- sql"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + select * from address_TEMP + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table address_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala index 2c7ccd842..756b29b0a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala @@ -105,4 +105,23 @@ class AddressRepoMock(toRow: Function1[AddressRowUnsaved, AddressRow], unsaved } } + override def upsertBatch(unsaved: List[AddressRow]): Stream[ConnectionIO, AddressRow] = { + Stream.emits { + unsaved.map { row => + map += (row.addressid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, AddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.addressid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRow.scala index a9b561f1f..e025c9767 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRow.scala @@ -15,6 +15,7 @@ import adventureworks.person.stateprovince.StateprovinceId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -96,4 +97,38 @@ object AddressRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[AddressRow] = new Write[AddressRow]( + puts = List((AddressId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.NoNulls), + (StateprovinceId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoBytea.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.addressid, x.addressline1, x.addressline2, x.city, x.stateprovinceid, x.postalcode, x.spatiallocation, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + AddressId.put.unsafeSetNonNullable(rs, i + 0, a.addressid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.addressline1) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.addressline2) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.city) + StateprovinceId.put.unsafeSetNonNullable(rs, i + 4, a.stateprovinceid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.postalcode) + TypoBytea.put.unsafeSetNullable(rs, i + 6, a.spatiallocation) + TypoUUID.put.unsafeSetNonNullable(rs, i + 7, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + AddressId.put.unsafeUpdateNonNullable(ps, i + 0, a.addressid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.addressline1) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.addressline2) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.city) + StateprovinceId.put.unsafeUpdateNonNullable(ps, i + 4, a.stateprovinceid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.postalcode) + TypoBytea.put.unsafeUpdateNullable(ps, i + 6, a.spatiallocation) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 7, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala index b1f569f9f..989b32ee7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala @@ -30,4 +30,7 @@ trait AddresstypeRepo { def update: 
UpdateBuilder[AddresstypeFields, AddresstypeRow] def update(row: AddresstypeRow): ConnectionIO[Boolean] def upsert(unsaved: AddresstypeRow): ConnectionIO[AddresstypeRow] + def upsertBatch(unsaved: List[AddresstypeRow]): Stream[ConnectionIO, AddresstypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, AddresstypeRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala index 6903106a0..1d2792a48 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -125,4 +127,33 @@ class AddresstypeRepoImpl extends AddresstypeRepo { returning "addresstypeid", "name", "rowguid", "modifieddate"::text """.query(using AddresstypeRow.read).unique } + override def upsertBatch(unsaved: List[AddresstypeRow]): Stream[ConnectionIO, AddresstypeRow] = { + Update[AddresstypeRow]( + s"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::uuid,?::timestamp) + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "addresstypeid", "name", "rowguid", "modifieddate"::text""" + )(using AddresstypeRow.write) + .updateManyWithGeneratedKeys[AddresstypeRow]("addresstypeid", "name", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, AddresstypeRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, AddresstypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table addresstype_TEMP (like person.addresstype) on commit drop".update.run + _ <- new FragmentOps(sql"""copy addresstype_TEMP("addresstypeid", "name", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using AddresstypeRow.text) + res <- sql"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + select * from addresstype_TEMP + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table addresstype_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala index dfac5baa3..966ce5824 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala @@ -105,4 +105,23 @@ class AddresstypeRepoMock(toRow: Function1[AddresstypeRowUnsaved, AddresstypeRow unsaved } } + override def upsertBatch(unsaved: List[AddresstypeRow]): Stream[ConnectionIO, AddresstypeRow] = { + Stream.emits { + unsaved.map { row => + map += (row.addresstypeid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, AddresstypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.addresstypeid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRow.scala index 5b299747b..cbc276eb1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -63,4 +64,23 @@ object AddresstypeRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[AddresstypeRow] = new Write[AddresstypeRow]( + puts = List((AddresstypeId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.addresstypeid, x.name, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + AddresstypeId.put.unsafeSetNonNullable(rs, i + 0, a.addresstypeid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoUUID.put.unsafeSetNonNullable(rs, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + AddresstypeId.put.unsafeUpdateNonNullable(ps, i + 0, a.addresstypeid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + 
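[editor's note] A small, hedged sketch of why the Repo/RepoImpl/RepoMock triple is generated for every table (AddresstypeRepoMock above included): callers can be written against the trait alone, so the in-memory mock can replace the SQL implementation in tests. The helper name below is an assumption for illustration.

// Depends only on the generated AddresstypeRepo trait; works with AddresstypeRepoImpl or AddresstypeRepoMock alike.
import adventureworks.person.addresstype.{AddresstypeRepo, AddresstypeRow}
import doobie.free.connection.ConnectionIO
import doobie.implicits._

def refreshAddresstypes(repo: AddresstypeRepo, rows: List[AddresstypeRow]): ConnectionIO[List[AddresstypeRow]] =
  repo.upsertBatch(rows).compile.toList // the upserted rows come back, whether mock or real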
TypoUUID.put.unsafeUpdateNonNullable(ps, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala index 6394b5241..19824a386 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala @@ -30,4 +30,7 @@ trait BusinessentityRepo { def update: UpdateBuilder[BusinessentityFields, BusinessentityRow] def update(row: BusinessentityRow): ConnectionIO[Boolean] def upsert(unsaved: BusinessentityRow): ConnectionIO[BusinessentityRow] + def upsertBatch(unsaved: List[BusinessentityRow]): Stream[ConnectionIO, BusinessentityRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala index f0efbba26..0e6e7f8b1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala @@ -10,12 +10,14 @@ package businessentity import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -120,4 +122,31 @@ class BusinessentityRepoImpl extends BusinessentityRepo { returning "businessentityid", "rowguid", "modifieddate"::text """.query(using BusinessentityRow.read).unique } + override def upsertBatch(unsaved: List[BusinessentityRow]): Stream[ConnectionIO, BusinessentityRow] = { + Update[BusinessentityRow]( + s"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + values (?::int4,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "rowguid", "modifieddate"::text""" + )(using BusinessentityRow.write) + .updateManyWithGeneratedKeys[BusinessentityRow]("businessentityid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, BusinessentityRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table businessentity_TEMP (like person.businessentity) on commit drop".update.run + _ <- new FragmentOps(sql"""copy businessentity_TEMP("businessentityid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using BusinessentityRow.text) + res <- sql"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + select * from businessentity_TEMP + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentity_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala index a17b71c53..e094cc685 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala @@ -105,4 +105,23 @@ class BusinessentityRepoMock(toRow: Function1[BusinessentityRowUnsaved, Business unsaved } } + override def upsertBatch(unsaved: List[BusinessentityRow]): Stream[ConnectionIO, BusinessentityRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRow.scala index c85943b51..b6f78424d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object BusinessentityRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[BusinessentityRow] = new Write[BusinessentityRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 1, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 1, a.rowguid) + 
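[editor's note] The Write[BusinessentityRow] instance being added here is what lets upsertBatch hand whole rows to doobie's Update, and it could be reused for ad-hoc batch statements as well. A hedged sketch, mirroring the placeholder casts from the generated SQL; the plain insert itself is illustrative, not generated.

// Illustrative only: reuses the generated Write[BusinessentityRow] for a plain batched insert.
import adventureworks.person.businessentity.BusinessentityRow
import cats.instances.list.catsStdInstancesForList
import doobie.free.connection.ConnectionIO
import doobie.util.update.Update

def insertAll(rows: List[BusinessentityRow]): ConnectionIO[Int] =
  Update[BusinessentityRow](
    """insert into person.businessentity("businessentityid", "rowguid", "modifieddate")
       values (?::int4,?::uuid,?::timestamp)"""
  )(using BusinessentityRow.write).updateMany(rows)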
TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala index 1dca0ffc3..933d24768 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala @@ -30,4 +30,7 @@ trait BusinessentityaddressRepo { def update: UpdateBuilder[BusinessentityaddressFields, BusinessentityaddressRow] def update(row: BusinessentityaddressRow): ConnectionIO[Boolean] def upsert(unsaved: BusinessentityaddressRow): ConnectionIO[BusinessentityaddressRow] + def upsertBatch(unsaved: List[BusinessentityaddressRow]): Stream[ConnectionIO, BusinessentityaddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityaddressRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala index 6b5c39ede..1784258a2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala @@ -13,12 +13,14 @@ import adventureworks.customtypes.TypoUUID import adventureworks.person.address.AddressId import adventureworks.person.addresstype.AddresstypeId import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -140,4 +142,31 @@ class BusinessentityaddressRepoImpl extends BusinessentityaddressRepo { returning "businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate"::text """.query(using BusinessentityaddressRow.read).unique } + override def upsertBatch(unsaved: List[BusinessentityaddressRow]): Stream[ConnectionIO, BusinessentityaddressRow] = { + Update[BusinessentityaddressRow]( + s"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + values (?::int4,?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate"::text""" + )(using BusinessentityaddressRow.write) + .updateManyWithGeneratedKeys[BusinessentityaddressRow]("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, BusinessentityaddressRow.read) + } + /* NOTE: this functionality 
is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityaddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table businessentityaddress_TEMP (like person.businessentityaddress) on commit drop".update.run + _ <- new FragmentOps(sql"""copy businessentityaddress_TEMP("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using BusinessentityaddressRow.text) + res <- sql"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + select * from businessentityaddress_TEMP + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentityaddress_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala index 23237f543..8605223de 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala @@ -105,4 +105,23 @@ class BusinessentityaddressRepoMock(toRow: Function1[BusinessentityaddressRowUns unsaved } } + override def upsertBatch(unsaved: List[BusinessentityaddressRow]): Stream[ConnectionIO, BusinessentityaddressRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityaddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRow.scala index bc0366f0c..0071d0b70 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRow.scala @@ -16,6 +16,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -76,4 +77,26 @@ object BusinessentityaddressRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[BusinessentityaddressRow] = new Write[BusinessentityaddressRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (AddressId.put, Nullability.NoNulls), + (AddresstypeId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.addressid, x.addresstypeid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + AddressId.put.unsafeSetNonNullable(rs, i + 1, a.addressid) + AddresstypeId.put.unsafeSetNonNullable(rs, i + 2, a.addresstypeid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + AddressId.put.unsafeUpdateNonNullable(ps, i + 1, a.addressid) + AddresstypeId.put.unsafeUpdateNonNullable(ps, i + 2, a.addresstypeid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala index abffd5b6f..161a0a1b0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala @@ -30,4 +30,7 @@ trait BusinessentitycontactRepo { def update: UpdateBuilder[BusinessentitycontactFields, BusinessentitycontactRow] def update(row: BusinessentitycontactRow): ConnectionIO[Boolean] def upsert(unsaved: BusinessentitycontactRow): ConnectionIO[BusinessentitycontactRow] + def upsertBatch(unsaved: List[BusinessentitycontactRow]): Stream[ConnectionIO, BusinessentitycontactRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentitycontactRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala index 85342187b..8069625d6 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.person.contacttype.ContacttypeId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -139,4 +141,31 @@ class BusinessentitycontactRepoImpl extends BusinessentitycontactRepo { returning "businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate"::text """.query(using BusinessentitycontactRow.read).unique } + override def upsertBatch(unsaved: List[BusinessentitycontactRow]): Stream[ConnectionIO, BusinessentitycontactRow] = { + Update[BusinessentitycontactRow]( + s"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + values (?::int4,?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate"::text""" + )(using BusinessentitycontactRow.write) + .updateManyWithGeneratedKeys[BusinessentitycontactRow]("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, BusinessentitycontactRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentitycontactRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table businessentitycontact_TEMP (like person.businessentitycontact) on commit drop".update.run + _ <- new FragmentOps(sql"""copy businessentitycontact_TEMP("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using BusinessentitycontactRow.text) + res <- sql"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + select * from businessentitycontact_TEMP + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentitycontact_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala index 9bcc51a0e..3b6ebb89a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala @@ -105,4 +105,23 @@ class BusinessentitycontactRepoMock(toRow: Function1[BusinessentitycontactRowUns unsaved } } + override def upsertBatch(unsaved: List[BusinessentitycontactRow]): Stream[ConnectionIO, BusinessentitycontactRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentitycontactRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRow.scala index d1abcff4c..848a0fb86 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRow.scala @@ -15,6 +15,7 @@ import adventureworks.person.contacttype.ContacttypeId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -75,4 +76,26 @@ object BusinessentitycontactRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[BusinessentitycontactRow] = new Write[BusinessentitycontactRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (ContacttypeId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.personid, x.contacttypeid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 1, a.personid) + ContacttypeId.put.unsafeSetNonNullable(rs, i + 2, a.contacttypeid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 1, a.personid) + ContacttypeId.put.unsafeUpdateNonNullable(ps, i + 2, a.contacttypeid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala index 5d7e93a36..ab505d336 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala @@ -30,4 +30,7 @@ trait ContacttypeRepo { def update: UpdateBuilder[ContacttypeFields, ContacttypeRow] def update(row: ContacttypeRow): ConnectionIO[Boolean] def upsert(unsaved: ContacttypeRow): ConnectionIO[ContacttypeRow] + def upsertBatch(unsaved: List[ContacttypeRow]): Stream[ConnectionIO, ContacttypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ContacttypeRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala index 9b70f69bb..851c8063f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala @@ -10,12 +10,14 @@ package contacttype import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -117,4 +119,31 @@ class ContacttypeRepoImpl extends ContacttypeRepo { returning "contacttypeid", "name", "modifieddate"::text """.query(using ContacttypeRow.read).unique } + override def upsertBatch(unsaved: List[ContacttypeRow]): Stream[ConnectionIO, ContacttypeRow] = { + Update[ContacttypeRow]( + s"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + values (?::int4,?::varchar,?::timestamp) + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "contacttypeid", "name", "modifieddate"::text""" + )(using ContacttypeRow.write) + .updateManyWithGeneratedKeys[ContacttypeRow]("contacttypeid", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, ContacttypeRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ContacttypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table contacttype_TEMP (like person.contacttype) on commit drop".update.run + _ <- new FragmentOps(sql"""copy contacttype_TEMP("contacttypeid", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ContacttypeRow.text) + res <- sql"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + select * from contacttype_TEMP + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table contacttype_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala index 0e86ae9c9..5eb28e2c5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala @@ -105,4 +105,23 @@ class ContacttypeRepoMock(toRow: Function1[ContacttypeRowUnsaved, ContacttypeRow unsaved } } + override def upsertBatch(unsaved: List[ContacttypeRow]): Stream[ConnectionIO, ContacttypeRow] = { + Stream.emits { + unsaved.map { row => + map += (row.contacttypeid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ContacttypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.contacttypeid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRow.scala index a834a834d..d50da8fef 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object ContacttypeRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ContacttypeRow] = new Write[ContacttypeRow]( + puts = List((ContacttypeId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.contacttypeid, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + ContacttypeId.put.unsafeSetNonNullable(rs, i + 0, a.contacttypeid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ContacttypeId.put.unsafeUpdateNonNullable(ps, i + 0, a.contacttypeid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala index b4e96ced8..b64280424 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala @@ -30,4 +30,7 @@ trait CountryregionRepo { def update: UpdateBuilder[CountryregionFields, CountryregionRow] def update(row: CountryregionRow): ConnectionIO[Boolean] def upsert(unsaved: CountryregionRow): ConnectionIO[CountryregionRow] + def upsertBatch(unsaved: List[CountryregionRow]): Stream[ConnectionIO, CountryregionRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregionRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala index 920daa6e3..b14d6433e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala @@ -10,12 +10,14 @@ package countryregion import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -114,4 +116,31 @@ class CountryregionRepoImpl extends CountryregionRepo { returning "countryregioncode", "name", "modifieddate"::text """.query(using CountryregionRow.read).unique } + override def upsertBatch(unsaved: List[CountryregionRow]): Stream[ConnectionIO, CountryregionRow] = { + Update[CountryregionRow]( + s"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + values (?,?::varchar,?::timestamp) + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "countryregioncode", "name", "modifieddate"::text""" + )(using CountryregionRow.write) + .updateManyWithGeneratedKeys[CountryregionRow]("countryregioncode", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, CountryregionRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregionRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table countryregion_TEMP (like person.countryregion) on commit drop".update.run + _ <- new FragmentOps(sql"""copy countryregion_TEMP("countryregioncode", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CountryregionRow.text) + res <- sql"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + select * from countryregion_TEMP + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregion_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala index 8b6584f11..0d0791daa 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala @@ -105,4 +105,23 @@ class CountryregionRepoMock(toRow: Function1[CountryregionRowUnsaved, Countryreg unsaved } } + override def upsertBatch(unsaved: List[CountryregionRow]): Stream[ConnectionIO, CountryregionRow] = { + Stream.emits { + unsaved.map { row => + map += (row.countryregioncode -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregionRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.countryregioncode -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRow.scala index 0a640016b..83d4aa4cc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -55,4 +56,20 @@ object CountryregionRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CountryregionRow] = new Write[CountryregionRow]( + puts = List((CountryregionId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.countryregioncode, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + CountryregionId.put.unsafeSetNonNullable(rs, i + 0, a.countryregioncode) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CountryregionId.put.unsafeUpdateNonNullable(ps, i + 0, a.countryregioncode) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) 
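// annotation, not generator output: `puts` above pairs one Put instance and one nullability flag per
// column, and `toList`, `unsafeSet` and `unsafeUpdate` must enumerate the same columns in the same
// order; the `i + 0 .. i + 2` offsets are relative to the starting index doobie passes in, which is
// what lets this Write compose when the row is embedded in a larger parameter list.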
+ } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala index d64e2df5c..879394666 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala @@ -30,4 +30,7 @@ trait EmailaddressRepo { def update: UpdateBuilder[EmailaddressFields, EmailaddressRow] def update(row: EmailaddressRow): ConnectionIO[Boolean] def upsert(unsaved: EmailaddressRow): ConnectionIO[EmailaddressRow] + def upsertBatch(unsaved: List[EmailaddressRow]): Stream[ConnectionIO, EmailaddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, EmailaddressRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala index 6052d1f2f..8bf8b0412 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -142,4 +144,33 @@ class EmailaddressRepoImpl extends EmailaddressRepo { returning "businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate"::text """.query(using EmailaddressRow.read).unique } + override def upsertBatch(unsaved: List[EmailaddressRow]): Stream[ConnectionIO, EmailaddressRow] = { + Update[EmailaddressRow]( + s"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + values (?::int4,?::int4,?,?::uuid,?::timestamp) + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate"::text""" + )(using EmailaddressRow.write) + .updateManyWithGeneratedKeys[EmailaddressRow]("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, EmailaddressRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmailaddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table emailaddress_TEMP (like person.emailaddress) on commit drop".update.run + _ <- new FragmentOps(sql"""copy emailaddress_TEMP("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using EmailaddressRow.text) + res <- sql"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + select * from emailaddress_TEMP + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table emailaddress_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala index d8ffa5e1b..ca0b8e098 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala @@ -105,4 +105,23 @@ class EmailaddressRepoMock(toRow: Function1[EmailaddressRowUnsaved, Emailaddress unsaved } } + override def upsertBatch(unsaved: List[EmailaddressRow]): Stream[ConnectionIO, EmailaddressRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmailaddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRow.scala index 37f5c3206..ed3de2a66 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -74,4 +75,26 @@ object EmailaddressRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[EmailaddressRow] = new Write[EmailaddressRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.emailaddressid, x.emailaddress, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.emailaddressid) + 
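// annotation, not generator output: `emailaddress` is the only Option-typed column in this row
// (declared Nullability.Nullable in `puts` above), so the next line goes through the nullable
// variant, which maps None to SQL NULL: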
Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.emailaddress) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.emailaddressid) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.emailaddress) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala index 3a5d28d89..a5d017e75 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala @@ -31,4 +31,7 @@ trait PasswordRepo { def update: UpdateBuilder[PasswordFields, PasswordRow] def update(row: PasswordRow): ConnectionIO[Boolean] def upsert(unsaved: PasswordRow): ConnectionIO[PasswordRow] + def upsertBatch(unsaved: List[PasswordRow]): Stream[ConnectionIO, PasswordRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PasswordRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala index 97ae0612d..7695a3c55 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -127,4 +129,35 @@ class PasswordRepoImpl extends PasswordRepo { returning "businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate"::text """.query(using PasswordRow.read).unique } + override def upsertBatch(unsaved: List[PasswordRow]): Stream[ConnectionIO, PasswordRow] = { + Update[PasswordRow]( + s"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + values (?::int4,?,?,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate"::text""" + )(using PasswordRow.write) + 
.updateManyWithGeneratedKeys[PasswordRow]("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, PasswordRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PasswordRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table password_TEMP (like person.password) on commit drop".update.run + _ <- new FragmentOps(sql"""copy password_TEMP("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PasswordRow.text) + res <- sql"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + select * from password_TEMP + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table password_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala index 7958d68ec..932570e1a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala @@ -106,4 +106,23 @@ class PasswordRepoMock(toRow: Function1[PasswordRowUnsaved, PasswordRow], unsaved } } + override def upsertBatch(unsaved: List[PasswordRow]): Stream[ConnectionIO, PasswordRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PasswordRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRow.scala index 20603b218..604145d17 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -69,4 +70,26 @@ object PasswordRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PasswordRow] = new Write[PasswordRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.passwordhash, x.passwordsalt, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.passwordhash) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.passwordsalt) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.passwordhash) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.passwordsalt) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala index 4fbf1b2a8..2fd452cc0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala @@ -31,4 +31,7 @@ trait PersonRepo { def update: UpdateBuilder[PersonFields, PersonRow] def update(row: PersonRow): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala index 67c2319e8..9744f786f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala @@ -15,6 +15,7 @@ import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Name import adventureworks.public.NameStyle import adventureworks.userdefined.FirstName +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -22,6 +23,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -169,4 +171,51 @@ class PersonRepoImpl extends PersonRepo { returning "businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate"::text """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + values (?::int4,?::bpchar,?::bool,?,?::varchar,?::varchar,?::varchar,?,?::int4,?::xml,?::xml,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate"::text""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
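(Annotation, not generator output: a minimal usage sketch, assuming a `xa: Transactor[IO]`, a
`rows: List[PersonRow]` and the usual doobie/cats-effect imports that are not part of this diff —

    val loaded: IO[Int] =
      (new PersonRepoImpl)
        .upsertStreaming(fs2.Stream.emits(rows).covary[ConnectionIO], batchSize = 5000)
        .transact(xa)

doobie's default Transactor strategy turns auto-commit off and commits once at the end, which is
exactly what this note asks for. `batchSize` only sets the chunk size handed to doobie-postgres's
`copyIn` while feeding rows into COPY; it does not split the work into separate transactions. The
warning stands because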
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like person.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + select * from person_TEMP + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala index 6fb77b058..5ab392d96 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala @@ -106,4 +106,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRow.scala index 77761b278..e20a68719 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRow.scala @@ -18,6 +18,7 @@ import adventureworks.userdefined.FirstName import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -126,4 +127,50 @@ object PersonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (NameStyle.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (/* user-picked */ FirstName.put, Nullability.NoNulls), + (Name.put, Nullability.Nullable), + (Name.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoXml.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.persontype, x.namestyle, x.title, x.firstname, x.middlename, x.lastname, x.suffix, x.emailpromotion, x.additionalcontactinfo, x.demographics, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.persontype) + NameStyle.put.unsafeSetNonNullable(rs, i + 2, a.namestyle) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 3, a.title) + /* user-picked */ FirstName.put.unsafeSetNonNullable(rs, i + 4, a.firstname) + Name.put.unsafeSetNullable(rs, i + 5, a.middlename) + Name.put.unsafeSetNonNullable(rs, i + 6, a.lastname) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 7, a.suffix) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 8, a.emailpromotion) + TypoXml.put.unsafeSetNullable(rs, i + 9, a.additionalcontactinfo) + TypoXml.put.unsafeSetNullable(rs, i + 10, a.demographics) + TypoUUID.put.unsafeSetNonNullable(rs, i + 11, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 12, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.persontype) + NameStyle.put.unsafeUpdateNonNullable(ps, i + 2, a.namestyle) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 3, a.title) + /* user-picked */ FirstName.put.unsafeUpdateNonNullable(ps, i + 4, a.firstname) + Name.put.unsafeUpdateNullable(ps, i + 5, a.middlename) + Name.put.unsafeUpdateNonNullable(ps, i + 6, a.lastname) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 7, a.suffix) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 8, a.emailpromotion) + TypoXml.put.unsafeUpdateNullable(ps, i + 9, a.additionalcontactinfo) + 
TypoXml.put.unsafeUpdateNullable(ps, i + 10, a.demographics) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 11, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 12, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala index 0cecdafb0..f6d010f6e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala @@ -30,4 +30,7 @@ trait PersonphoneRepo { def update: UpdateBuilder[PersonphoneFields, PersonphoneRow] def update(row: PersonphoneRow): ConnectionIO[Boolean] def upsert(unsaved: PersonphoneRow): ConnectionIO[PersonphoneRow] + def upsertBatch(unsaved: List[PersonphoneRow]): Stream[ConnectionIO, PersonphoneRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonphoneRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala index a9fecce74..e3ca1c18c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.person.phonenumbertype.PhonenumbertypeId import adventureworks.public.Phone +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,29 @@ class PersonphoneRepoImpl extends PersonphoneRepo { returning "businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate"::text """.query(using PersonphoneRow.read).unique } + override def upsertBatch(unsaved: List[PersonphoneRow]): Stream[ConnectionIO, PersonphoneRow] = { + Update[PersonphoneRow]( + s"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + values (?::int4,?::varchar,?::int4,?::timestamp) + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate"::text""" + )(using PersonphoneRow.write) + .updateManyWithGeneratedKeys[PersonphoneRow]("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate")(unsaved)(using catsStdInstancesForList, PersonphoneRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonphoneRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table personphone_TEMP (like person.personphone) on commit drop".update.run + _ <- new FragmentOps(sql"""copy personphone_TEMP("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PersonphoneRow.text) + res <- sql"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + select * from personphone_TEMP + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personphone_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala index bb3bc4b6d..aefd0e44c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala @@ -105,4 +105,23 @@ class PersonphoneRepoMock(toRow: Function1[PersonphoneRowUnsaved, PersonphoneRow unsaved } } + override def upsertBatch(unsaved: List[PersonphoneRow]): Stream[ConnectionIO, PersonphoneRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonphoneRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRow.scala index e642cdbc7..84a9f03f4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Phone import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -68,4 +69,23 @@ object PersonphoneRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PersonphoneRow] = new Write[PersonphoneRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Phone.put, Nullability.NoNulls), + (PhonenumbertypeId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.phonenumber, x.phonenumbertypeid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Phone.put.unsafeSetNonNullable(rs, i + 1, a.phonenumber) + PhonenumbertypeId.put.unsafeSetNonNullable(rs, i + 2, a.phonenumbertypeid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 
0, a.businessentityid) + Phone.put.unsafeUpdateNonNullable(ps, i + 1, a.phonenumber) + PhonenumbertypeId.put.unsafeUpdateNonNullable(ps, i + 2, a.phonenumbertypeid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala index 18f72dd85..e7ba17009 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala @@ -30,4 +30,7 @@ trait PhonenumbertypeRepo { def update: UpdateBuilder[PhonenumbertypeFields, PhonenumbertypeRow] def update(row: PhonenumbertypeRow): ConnectionIO[Boolean] def upsert(unsaved: PhonenumbertypeRow): ConnectionIO[PhonenumbertypeRow] + def upsertBatch(unsaved: List[PhonenumbertypeRow]): Stream[ConnectionIO, PhonenumbertypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PhonenumbertypeRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala index c7972999b..35b4e44d9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala @@ -10,12 +10,14 @@ package phonenumbertype import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -117,4 +119,31 @@ class PhonenumbertypeRepoImpl extends PhonenumbertypeRepo { returning "phonenumbertypeid", "name", "modifieddate"::text """.query(using PhonenumbertypeRow.read).unique } + override def upsertBatch(unsaved: List[PhonenumbertypeRow]): Stream[ConnectionIO, PhonenumbertypeRow] = { + Update[PhonenumbertypeRow]( + s"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + values (?::int4,?::varchar,?::timestamp) + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "phonenumbertypeid", "name", "modifieddate"::text""" + )(using PhonenumbertypeRow.write) + .updateManyWithGeneratedKeys[PhonenumbertypeRow]("phonenumbertypeid", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, PhonenumbertypeRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
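(Annotation, not generator output: `upsertBatch` above returns the stored rows as a Stream because
`updateManyWithGeneratedKeys` streams back the rows produced by the `returning` clause; a caller
would typically drain it, e.g., assuming `rows: List[PhonenumbertypeRow]`, a `xa: Transactor[IO]`
and the usual doobie imports that are not part of this diff:

    (new PhonenumbertypeRepoImpl).upsertBatch(rows).compile.toList.transact(xa)

`upsertStreaming` below trades the JDBC batch for COPY into a temp table and must run inside one
transaction because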
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PhonenumbertypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table phonenumbertype_TEMP (like person.phonenumbertype) on commit drop".update.run + _ <- new FragmentOps(sql"""copy phonenumbertype_TEMP("phonenumbertypeid", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PhonenumbertypeRow.text) + res <- sql"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + select * from phonenumbertype_TEMP + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table phonenumbertype_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala index 5253a2b2f..7b2e9627c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala @@ -105,4 +105,23 @@ class PhonenumbertypeRepoMock(toRow: Function1[PhonenumbertypeRowUnsaved, Phonen unsaved } } + override def upsertBatch(unsaved: List[PhonenumbertypeRow]): Stream[ConnectionIO, PhonenumbertypeRow] = { + Stream.emits { + unsaved.map { row => + map += (row.phonenumbertypeid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PhonenumbertypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.phonenumbertypeid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRow.scala index 31373d96f..f0ac7984a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object PhonenumbertypeRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PhonenumbertypeRow] = new Write[PhonenumbertypeRow]( + puts = List((PhonenumbertypeId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.phonenumbertypeid, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + PhonenumbertypeId.put.unsafeSetNonNullable(rs, i + 0, a.phonenumbertypeid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + PhonenumbertypeId.put.unsafeUpdateNonNullable(ps, i + 0, a.phonenumbertypeid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + 
TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala index eeba397b3..2cddc6f17 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala @@ -30,4 +30,7 @@ trait StateprovinceRepo { def update: UpdateBuilder[StateprovinceFields, StateprovinceRow] def update(row: StateprovinceRow): ConnectionIO[Boolean] def upsert(unsaved: StateprovinceRow): ConnectionIO[StateprovinceRow] + def upsertBatch(unsaved: List[StateprovinceRow]): Stream[ConnectionIO, StateprovinceRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, StateprovinceRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala index 732052309..1b63adf34 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.person.countryregion.CountryregionId import adventureworks.public.Flag import adventureworks.public.Name import adventureworks.sales.salesterritory.SalesterritoryId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -148,4 +150,41 @@ class StateprovinceRepoImpl extends StateprovinceRepo { returning "stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate"::text """.query(using StateprovinceRow.read).unique } + override def upsertBatch(unsaved: List[StateprovinceRow]): Stream[ConnectionIO, StateprovinceRow] = { + Update[StateprovinceRow]( + s"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + values (?::int4,?::bpchar,?,?::bool,?::varchar,?::int4,?::uuid,?::timestamp) + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate"::text""" + )(using StateprovinceRow.write) + 
.updateManyWithGeneratedKeys[StateprovinceRow]("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, StateprovinceRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, StateprovinceRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table stateprovince_TEMP (like person.stateprovince) on commit drop".update.run + _ <- new FragmentOps(sql"""copy stateprovince_TEMP("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using StateprovinceRow.text) + res <- sql"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + select * from stateprovince_TEMP + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table stateprovince_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala index fac80aed1..0deaae9e2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala @@ -105,4 +105,23 @@ class StateprovinceRepoMock(toRow: Function1[StateprovinceRowUnsaved, Stateprovi unsaved } } + override def upsertBatch(unsaved: List[StateprovinceRow]): Stream[ConnectionIO, StateprovinceRow] = { + Stream.emits { + unsaved.map { row => + map += (row.stateprovinceid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
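(Annotation, not generator output: the *RepoMock variants back the repository with an in-memory map
keyed by the primary key, so `upsertBatch` just overwrites entries and re-emits them, and
`upsertStreaming` below drains the stream and returns how many rows it saw. They are intended for
tests that run without a database; the auto-commit caveat only concerns the real implementation,
which carries this note because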
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, StateprovinceRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.stateprovinceid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRow.scala index de4c2f318..65fb43876 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRow.scala @@ -17,6 +17,7 @@ import adventureworks.sales.salesterritory.SalesterritoryId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -94,4 +95,35 @@ object StateprovinceRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[StateprovinceRow] = new Write[StateprovinceRow]( + puts = List((StateprovinceId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (CountryregionId.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (SalesterritoryId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.stateprovinceid, x.stateprovincecode, x.countryregioncode, x.isonlystateprovinceflag, x.name, x.territoryid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + StateprovinceId.put.unsafeSetNonNullable(rs, i + 0, a.stateprovinceid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.stateprovincecode) + CountryregionId.put.unsafeSetNonNullable(rs, i + 2, a.countryregioncode) + Flag.put.unsafeSetNonNullable(rs, i + 3, a.isonlystateprovinceflag) + Name.put.unsafeSetNonNullable(rs, i + 4, a.name) + SalesterritoryId.put.unsafeSetNonNullable(rs, i + 5, a.territoryid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 6, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 7, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + StateprovinceId.put.unsafeUpdateNonNullable(ps, i + 0, a.stateprovinceid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.stateprovincecode) + CountryregionId.put.unsafeUpdateNonNullable(ps, i + 2, a.countryregioncode) + Flag.put.unsafeUpdateNonNullable(ps, i + 3, a.isonlystateprovinceflag) + Name.put.unsafeUpdateNonNullable(ps, i + 4, a.name) + SalesterritoryId.put.unsafeUpdateNonNullable(ps, i + 5, a.territoryid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 6, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 7, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala index 89ee58183..b7ef4bd78 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala @@ -30,4 +30,7 @@ trait BillofmaterialsRepo { def update: 
UpdateBuilder[BillofmaterialsFields, BillofmaterialsRow] def update(row: BillofmaterialsRow): ConnectionIO[Boolean] def upsert(unsaved: BillofmaterialsRow): ConnectionIO[BillofmaterialsRow] + def upsertBatch(unsaved: List[BillofmaterialsRow]): Stream[ConnectionIO, BillofmaterialsRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, BillofmaterialsRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala index 01c65f7c3..efb879c16 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.product.ProductId import adventureworks.production.unitmeasure.UnitmeasureId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -150,4 +152,43 @@ class BillofmaterialsRepoImpl extends BillofmaterialsRepo { returning "billofmaterialsid", "productassemblyid", "componentid", "startdate"::text, "enddate"::text, "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate"::text """.query(using BillofmaterialsRow.read).unique } + override def upsertBatch(unsaved: List[BillofmaterialsRow]): Stream[ConnectionIO, BillofmaterialsRow] = { + Update[BillofmaterialsRow]( + s"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + values (?::int4,?::int4,?::int4,?::timestamp,?::timestamp,?::bpchar,?::int2,?::numeric,?::timestamp) + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + "modifieddate" = EXCLUDED."modifieddate" + returning "billofmaterialsid", "productassemblyid", "componentid", "startdate"::text, "enddate"::text, "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate"::text""" + )(using BillofmaterialsRow.write) + .updateManyWithGeneratedKeys[BillofmaterialsRow]("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate")(unsaved)(using catsStdInstancesForList, BillofmaterialsRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BillofmaterialsRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table billofmaterials_TEMP (like production.billofmaterials) on commit drop".update.run + _ <- new FragmentOps(sql"""copy billofmaterials_TEMP("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using BillofmaterialsRow.text) + res <- sql"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + select * from billofmaterials_TEMP + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table billofmaterials_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala index 5640ab96c..d79efe0da 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala @@ -105,4 +105,23 @@ class BillofmaterialsRepoMock(toRow: Function1[BillofmaterialsRowUnsaved, Billof unsaved } } + override def upsertBatch(unsaved: List[BillofmaterialsRow]): Stream[ConnectionIO, BillofmaterialsRow] = { + Stream.emits { + unsaved.map { row => + map += (row.billofmaterialsid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BillofmaterialsRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.billofmaterialsid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRow.scala index e8429d3ab..868216fa1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.unitmeasure.UnitmeasureId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -108,4 +109,38 @@ object BillofmaterialsRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[BillofmaterialsRow] = new Write[BillofmaterialsRow]( + puts = List((Meta.IntMeta.put, Nullability.NoNulls), + (ProductId.put, Nullability.Nullable), + (ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (UnitmeasureId.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.billofmaterialsid, x.productassemblyid, x.componentid, x.startdate, x.enddate, x.unitmeasurecode, x.bomlevel, x.perassemblyqty, x.modifieddate), + unsafeSet = (rs, i, a) => { + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 0, a.billofmaterialsid) + ProductId.put.unsafeSetNullable(rs, i + 1, a.productassemblyid) + ProductId.put.unsafeSetNonNullable(rs, i + 2, a.componentid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 4, a.enddate) + UnitmeasureId.put.unsafeSetNonNullable(rs, i + 5, a.unitmeasurecode) + TypoShort.put.unsafeSetNonNullable(rs, i + 6, a.bomlevel) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.perassemblyqty) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.billofmaterialsid) + ProductId.put.unsafeUpdateNullable(ps, i + 1, a.productassemblyid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 2, a.componentid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 4, a.enddate) + UnitmeasureId.put.unsafeUpdateNonNullable(ps, i + 5, a.unitmeasurecode) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 6, a.bomlevel) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.perassemblyqty) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala index 58a1d85c3..984d40b8d 100644 --- 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala @@ -30,4 +30,7 @@ trait CultureRepo { def update: UpdateBuilder[CultureFields, CultureRow] def update(row: CultureRow): ConnectionIO[Boolean] def upsert(unsaved: CultureRow): ConnectionIO[CultureRow] + def upsertBatch(unsaved: List[CultureRow]): Stream[ConnectionIO, CultureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CultureRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala index a58da3339..7008fcdc1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala @@ -10,12 +10,14 @@ package culture import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -114,4 +116,31 @@ class CultureRepoImpl extends CultureRepo { returning "cultureid", "name", "modifieddate"::text """.query(using CultureRow.read).unique } + override def upsertBatch(unsaved: List[CultureRow]): Stream[ConnectionIO, CultureRow] = { + Update[CultureRow]( + s"""insert into production.culture("cultureid", "name", "modifieddate") + values (?::bpchar,?::varchar,?::timestamp) + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "cultureid", "name", "modifieddate"::text""" + )(using CultureRow.write) + .updateManyWithGeneratedKeys[CultureRow]("cultureid", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, CultureRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CultureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table culture_TEMP (like production.culture) on commit drop".update.run + _ <- new FragmentOps(sql"""copy culture_TEMP("cultureid", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CultureRow.text) + res <- sql"""insert into production.culture("cultureid", "name", "modifieddate") + select * from culture_TEMP + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table culture_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala index 52aaccb3d..7d5e188ab 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala @@ -105,4 +105,23 @@ class CultureRepoMock(toRow: Function1[CultureRowUnsaved, CultureRow], unsaved } } + override def upsertBatch(unsaved: List[CultureRow]): Stream[ConnectionIO, CultureRow] = { + Stream.emits { + unsaved.map { row => + map += (row.cultureid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CultureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.cultureid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRow.scala index 0849fe060..eae087398 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -55,4 +56,20 @@ object CultureRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CultureRow] = new Write[CultureRow]( + puts = List((CultureId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.cultureid, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + CultureId.put.unsafeSetNonNullable(rs, i + 0, a.cultureid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CultureId.put.unsafeUpdateNonNullable(ps, i + 0, a.cultureid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala index 
463a4d087..694a5f48d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala @@ -32,4 +32,7 @@ trait DocumentRepo { def update: UpdateBuilder[DocumentFields, DocumentRow] def update(row: DocumentRow): ConnectionIO[Boolean] def upsert(unsaved: DocumentRow): ConnectionIO[DocumentRow] + def upsertBatch(unsaved: List[DocumentRow]): Stream[ConnectionIO, DocumentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, DocumentRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala index 447c9fef2..b25447879 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Flag +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -177,4 +179,51 @@ class DocumentRepoImpl extends DocumentRepo { returning "title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate"::text, "documentnode" """.query(using DocumentRow.read).unique } + override def upsertBatch(unsaved: List[DocumentRow]): Stream[ConnectionIO, DocumentRow] = { + Update[DocumentRow]( + s"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + values (?,?::int4,?::bool,?,?,?::bpchar,?::int4,?::int2,?,?::bytea,?::uuid,?::timestamp,?)
+ on conflict ("documentnode") + do update set + "title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate"::text, "documentnode"""" + )(using DocumentRow.write) + .updateManyWithGeneratedKeys[DocumentRow]("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode")(unsaved)(using catsStdInstancesForList, DocumentRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, DocumentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table document_TEMP (like production.document) on commit drop".update.run + _ <- new FragmentOps(sql"""copy document_TEMP("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") from stdin""").copyIn(unsaved, batchSize)(using DocumentRow.text) + res <- sql"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + select * from document_TEMP + on conflict ("documentnode") + do update set + "title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table document_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala index 270536c08..51b5cc731 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala @@ -109,4 +109,23 @@ class DocumentRepoMock(toRow: Function1[DocumentRowUnsaved, DocumentRow], unsaved } } + override def upsertBatch(unsaved: List[DocumentRow]): Stream[ConnectionIO, DocumentRow] = { + Stream.emits { + unsaved.map { row => + map += (row.documentnode -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, DocumentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.documentnode -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRow.scala index 8f15a5fcd..99269ec78 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRow.scala @@ -17,6 +17,7 @@ import adventureworks.public.Flag import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -126,4 +127,50 @@ object DocumentRow { sb.append(Text.DELIMETER) DocumentId.text.unsafeEncode(row.documentnode, sb) } + implicit lazy val write: Write[DocumentRow] = new Write[DocumentRow]( + puts = List((Meta.StringMeta.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoBytea.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (DocumentId.put, Nullability.NoNulls)), + toList = x => List(x.title, x.owner, x.folderflag, x.filename, x.fileextension, x.revision, x.changenumber, x.status, x.documentsummary, x.document, x.rowguid, x.modifieddate, x.documentnode), + unsafeSet = (rs, i, a) => { + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 0, a.title) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 1, a.owner) + Flag.put.unsafeSetNonNullable(rs, i + 2, a.folderflag) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.filename) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.fileextension) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.revision) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 6, a.changenumber) + TypoShort.put.unsafeSetNonNullable(rs, i + 7, a.status) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 8, a.documentsummary) + TypoBytea.put.unsafeSetNullable(rs, i + 9, a.document) + TypoUUID.put.unsafeSetNonNullable(rs, i + 10, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 11, a.modifieddate) + DocumentId.put.unsafeSetNonNullable(rs, i + 12, a.documentnode) + }, + unsafeUpdate = (ps, i, a) => { + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.title) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 1, a.owner) + Flag.put.unsafeUpdateNonNullable(ps, i + 2, a.folderflag) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.filename) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.fileextension) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.revision) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.changenumber) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 7, a.status) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 8, a.documentsummary) + TypoBytea.put.unsafeUpdateNullable(ps, i + 9, a.document) + 
TypoUUID.put.unsafeUpdateNonNullable(ps, i + 10, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 11, a.modifieddate) + DocumentId.put.unsafeUpdateNonNullable(ps, i + 12, a.documentnode) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala index 7e513c17c..790cef184 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala @@ -30,4 +30,7 @@ trait IllustrationRepo { def update: UpdateBuilder[IllustrationFields, IllustrationRow] def update(row: IllustrationRow): ConnectionIO[Boolean] def upsert(unsaved: IllustrationRow): ConnectionIO[IllustrationRow] + def upsertBatch(unsaved: List[IllustrationRow]): Stream[ConnectionIO, IllustrationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, IllustrationRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala index 488fb755b..0a508b150 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala @@ -10,12 +10,14 @@ package illustration import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoXml +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -117,4 +119,31 @@ class IllustrationRepoImpl extends IllustrationRepo { returning "illustrationid", "diagram", "modifieddate"::text """.query(using IllustrationRow.read).unique } + override def upsertBatch(unsaved: List[IllustrationRow]): Stream[ConnectionIO, IllustrationRow] = { + Update[IllustrationRow]( + s"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + values (?::int4,?::xml,?::timestamp) + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + returning "illustrationid", "diagram", "modifieddate"::text""" + )(using IllustrationRow.write) + .updateManyWithGeneratedKeys[IllustrationRow]("illustrationid", "diagram", "modifieddate")(unsaved)(using catsStdInstancesForList, IllustrationRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, IllustrationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table illustration_TEMP (like production.illustration) on commit drop".update.run + _ <- new FragmentOps(sql"""copy illustration_TEMP("illustrationid", "diagram", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using IllustrationRow.text) + res <- sql"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + select * from illustration_TEMP + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table illustration_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala index 8925569c5..635eb931b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala @@ -105,4 +105,23 @@ class IllustrationRepoMock(toRow: Function1[IllustrationRowUnsaved, Illustration unsaved } } + override def upsertBatch(unsaved: List[IllustrationRow]): Stream[ConnectionIO, IllustrationRow] = { + Stream.emits { + unsaved.map { row => + map += (row.illustrationid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, IllustrationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.illustrationid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRow.scala index 258746474..90975d905 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoXml import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object IllustrationRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[IllustrationRow] = new Write[IllustrationRow]( + puts = List((IllustrationId.put, Nullability.NoNulls), + (TypoXml.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.illustrationid, x.diagram, x.modifieddate), + unsafeSet = (rs, i, a) => { + IllustrationId.put.unsafeSetNonNullable(rs, i + 0, a.illustrationid) + TypoXml.put.unsafeSetNullable(rs, i + 1, a.diagram) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + IllustrationId.put.unsafeUpdateNonNullable(ps, i + 0, a.illustrationid) + TypoXml.put.unsafeUpdateNullable(ps, i + 1, a.diagram) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, 
a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala index e86826bc1..13ed69247 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala @@ -30,4 +30,7 @@ trait LocationRepo { def update: UpdateBuilder[LocationFields, LocationRow] def update(row: LocationRow): ConnectionIO[Boolean] def upsert(unsaved: LocationRow): ConnectionIO[LocationRow] + def upsertBatch(unsaved: List[LocationRow]): Stream[ConnectionIO, LocationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, LocationRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala index c1d8f5d8f..e48b20a1f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala @@ -10,6 +10,7 @@ package location import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,35 @@ class LocationRepoImpl extends LocationRepo { returning "locationid", "name", "costrate", "availability", "modifieddate"::text """.query(using LocationRow.read).unique } + override def upsertBatch(unsaved: List[LocationRow]): Stream[ConnectionIO, LocationRow] = { + Update[LocationRow]( + s"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + values (?::int4,?::varchar,?::numeric,?::numeric,?::timestamp) + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + returning "locationid", "name", "costrate", "availability", "modifieddate"::text""" + )(using LocationRow.write) + .updateManyWithGeneratedKeys[LocationRow]("locationid", "name", "costrate", "availability", "modifieddate")(unsaved)(using catsStdInstancesForList, LocationRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, LocationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table location_TEMP (like production.location) on commit drop".update.run + _ <- new FragmentOps(sql"""copy location_TEMP("locationid", "name", "costrate", "availability", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using LocationRow.text) + res <- sql"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + select * from location_TEMP + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table location_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala index 7337b0d6e..7042d1e66 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala @@ -105,4 +105,23 @@ class LocationRepoMock(toRow: Function1[LocationRowUnsaved, LocationRow], unsaved } } + override def upsertBatch(unsaved: List[LocationRow]): Stream[ConnectionIO, LocationRow] = { + Stream.emits { + unsaved.map { row => + map += (row.locationid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, LocationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.locationid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRow.scala index efc3273e1..63fc662d8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -73,4 +74,26 @@ object LocationRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[LocationRow] = new Write[LocationRow]( + puts = List((LocationId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.locationid, x.name, x.costrate, x.availability, x.modifieddate), + unsafeSet = (rs, i, a) => { + LocationId.put.unsafeSetNonNullable(rs, i + 0, a.locationid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.costrate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.availability) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, 
a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + LocationId.put.unsafeUpdateNonNullable(ps, i + 0, a.locationid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.costrate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.availability) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala index af68a8720..e97d9992a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala @@ -30,4 +30,7 @@ trait ProductRepo { def update: UpdateBuilder[ProductFields, ProductRow] def update(row: ProductRow): ConnectionIO[Boolean] def upsert(unsaved: ProductRow): ConnectionIO[ProductRow] + def upsertBatch(unsaved: List[ProductRow]): Stream[ConnectionIO, ProductRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala index 7e0ad5f73..594c57821 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala @@ -16,6 +16,7 @@ import adventureworks.production.productsubcategory.ProductsubcategoryId import adventureworks.production.unitmeasure.UnitmeasureId import adventureworks.public.Flag import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -23,6 +24,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -221,4 +223,75 @@ class ProductRepoImpl extends ProductRepo { returning "productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate"::text, "sellenddate"::text, "discontinueddate"::text, "rowguid", "modifieddate"::text """.query(using ProductRow.read).unique } + override def upsertBatch(unsaved: List[ProductRow]): Stream[ConnectionIO, ProductRow] = { + Update[ProductRow]( + s"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", 
"modifieddate") + values (?::int4,?::varchar,?,?::bool,?::bool,?,?::int2,?::int2,?::numeric,?::numeric,?,?::bpchar,?::bpchar,?::numeric,?::int4,?::bpchar,?::bpchar,?::bpchar,?::int4,?::int4,?::timestamp,?::timestamp,?::timestamp,?::uuid,?::timestamp) + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate"::text, "sellenddate"::text, "discontinueddate"::text, "rowguid", "modifieddate"::text""" + )(using ProductRow.write) + .updateManyWithGeneratedKeys[ProductRow]("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table product_TEMP (like production.product) on commit drop".update.run + _ <- new FragmentOps(sql"""copy product_TEMP("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductRow.text) + res <- sql"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") + select * from product_TEMP + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table product_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala index e60b5715b..92c26703b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala @@ -105,4 +105,23 @@ class ProductRepoMock(toRow: Function1[ProductRowUnsaved, ProductRow], unsaved } } + override def upsertBatch(unsaved: List[ProductRow]): Stream[ConnectionIO, ProductRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRow.scala index 7ab660d0e..bda24fd71 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRow.scala @@ -19,6 +19,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.DecodingFailure @@ -278,4 +279,86 @@ object ProductRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductRow] = new Write[ProductRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (UnitmeasureId.put, Nullability.Nullable), + (UnitmeasureId.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (ProductsubcategoryId.put, Nullability.Nullable), + (ProductmodelId.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.name, x.productnumber, x.makeflag, x.finishedgoodsflag, x.color, x.safetystocklevel, x.reorderpoint, x.standardcost, x.listprice, x.size, x.sizeunitmeasurecode, x.weightunitmeasurecode, x.weight, x.daystomanufacture, x.productline, x.`class`, x.style, x.productsubcategoryid, x.productmodelid, x.sellstartdate, x.sellenddate, x.discontinueddate, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.productnumber) + Flag.put.unsafeSetNonNullable(rs, i + 3, a.makeflag) + Flag.put.unsafeSetNonNullable(rs, i + 4, a.finishedgoodsflag) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 5, a.color) + TypoShort.put.unsafeSetNonNullable(rs, i + 6, a.safetystocklevel) + TypoShort.put.unsafeSetNonNullable(rs, i + 7, a.reorderpoint) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 8, a.standardcost) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 9, a.listprice) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 10, a.size) + UnitmeasureId.put.unsafeSetNullable(rs, i + 11, a.sizeunitmeasurecode) + 
UnitmeasureId.put.unsafeSetNullable(rs, i + 12, a.weightunitmeasurecode) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 13, a.weight) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 14, a.daystomanufacture) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 15, a.productline) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 16, a.`class`) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 17, a.style) + ProductsubcategoryId.put.unsafeSetNullable(rs, i + 18, a.productsubcategoryid) + ProductmodelId.put.unsafeSetNullable(rs, i + 19, a.productmodelid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 20, a.sellstartdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 21, a.sellenddate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 22, a.discontinueddate) + TypoUUID.put.unsafeSetNonNullable(rs, i + 23, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 24, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.productnumber) + Flag.put.unsafeUpdateNonNullable(ps, i + 3, a.makeflag) + Flag.put.unsafeUpdateNonNullable(ps, i + 4, a.finishedgoodsflag) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 5, a.color) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 6, a.safetystocklevel) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 7, a.reorderpoint) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 8, a.standardcost) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 9, a.listprice) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 10, a.size) + UnitmeasureId.put.unsafeUpdateNullable(ps, i + 11, a.sizeunitmeasurecode) + UnitmeasureId.put.unsafeUpdateNullable(ps, i + 12, a.weightunitmeasurecode) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 13, a.weight) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 14, a.daystomanufacture) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 15, a.productline) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 16, a.`class`) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 17, a.style) + ProductsubcategoryId.put.unsafeUpdateNullable(ps, i + 18, a.productsubcategoryid) + ProductmodelId.put.unsafeUpdateNullable(ps, i + 19, a.productmodelid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 20, a.sellstartdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 21, a.sellenddate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 22, a.discontinueddate) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 23, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 24, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala index 87559d305..8410b4a8a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala @@ -30,4 +30,7 @@ trait ProductcategoryRepo { def update: UpdateBuilder[ProductcategoryFields, ProductcategoryRow] def update(row: ProductcategoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductcategoryRow): ConnectionIO[ProductcategoryRow] + def upsertBatch(unsaved: List[ProductcategoryRow]): Stream[ConnectionIO, 
ProductcategoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala index 39545534f..17564c3fa 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -125,4 +127,33 @@ class ProductcategoryRepoImpl extends ProductcategoryRepo { returning "productcategoryid", "name", "rowguid", "modifieddate"::text """.query(using ProductcategoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductcategoryRow]): Stream[ConnectionIO, ProductcategoryRow] = { + Update[ProductcategoryRow]( + s"""insert into production.productcategory("productcategoryid", "name", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::uuid,?::timestamp) + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productcategoryid", "name", "rowguid", "modifieddate"::text""" + )(using ProductcategoryRow.write) + .updateManyWithGeneratedKeys[ProductcategoryRow]("productcategoryid", "name", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductcategoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productcategory_TEMP (like production.productcategory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productcategory_TEMP("productcategoryid", "name", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductcategoryRow.text) + res <- sql"""insert into production.productcategory("productcategoryid", "name", "rowguid", "modifieddate") + select * from productcategory_TEMP + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcategory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala index ff0c57640..22d1d9d1f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductcategoryRepoMock(toRow: Function1[ProductcategoryRowUnsaved, Produc unsaved } } + override def upsertBatch(unsaved: List[ProductcategoryRow]): Stream[ConnectionIO, ProductcategoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productcategoryid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productcategoryid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRow.scala index 1b79d0993..1d76e7087 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -63,4 +64,23 @@ object ProductcategoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductcategoryRow] = new Write[ProductcategoryRow]( + puts = List((ProductcategoryId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productcategoryid, x.name, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductcategoryId.put.unsafeSetNonNullable(rs, i + 0, a.productcategoryid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoUUID.put.unsafeSetNonNullable(rs, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, 
i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductcategoryId.put.unsafeUpdateNonNullable(ps, i + 0, a.productcategoryid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala index 151ea57fd..e3330ab80 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala @@ -30,4 +30,7 @@ trait ProductcosthistoryRepo { def update: UpdateBuilder[ProductcosthistoryFields, ProductcosthistoryRow] def update(row: ProductcosthistoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductcosthistoryRow): ConnectionIO[ProductcosthistoryRow] + def upsertBatch(unsaved: List[ProductcosthistoryRow]): Stream[ConnectionIO, ProductcosthistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcosthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala index 1f4673f64..4b07a0ebd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala @@ -10,6 +10,7 @@ package productcosthistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -135,4 +137,33 @@ class ProductcosthistoryRepoImpl extends ProductcosthistoryRepo { returning "productid", "startdate"::text, "enddate"::text, "standardcost", "modifieddate"::text """.query(using ProductcosthistoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductcosthistoryRow]): Stream[ConnectionIO, ProductcosthistoryRow] = { + Update[ProductcosthistoryRow]( + s"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + values (?::int4,?::timestamp,?::timestamp,?::numeric,?::timestamp) + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "startdate"::text, "enddate"::text, "standardcost", "modifieddate"::text""" + )(using ProductcosthistoryRow.write) 
+ .updateManyWithGeneratedKeys[ProductcosthistoryRow]("productid", "startdate", "enddate", "standardcost", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductcosthistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcosthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productcosthistory_TEMP (like production.productcosthistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productcosthistory_TEMP("productid", "startdate", "enddate", "standardcost", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductcosthistoryRow.text) + res <- sql"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + select * from productcosthistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcosthistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala index c92ee66eb..8cc64e2b1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductcosthistoryRepoMock(toRow: Function1[ProductcosthistoryRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[ProductcosthistoryRow]): Stream[ConnectionIO, ProductcosthistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
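The generated upsertStreaming above only behaves correctly when the temporary table, the COPY and the final insert share a single transaction, which is what the NOTE about auto-commit mode is warning about. A minimal usage sketch follows; the Transactor `xa`, the `rows` value and the batchSize of 1000 are assumptions for illustration, not part of the generated code:

import adventureworks.production.productcosthistory.{ProductcosthistoryRepoImpl, ProductcosthistoryRow}
import cats.effect.IO
import doobie.free.connection.ConnectionIO
import doobie.implicits._
import doobie.util.transactor.Transactor
import fs2.Stream

def bulkUpsert(xa: Transactor[IO], rows: List[ProductcosthistoryRow]): IO[Int] = {
  val repo = new ProductcosthistoryRepoImpl
  // transact(xa) turns auto-commit off and runs all three statements in one transaction,
  // so the `on commit drop` temp table still exists when the final insert reads from it.
  repo.upsertStreaming(Stream.emits(rows).covary[ConnectionIO], batchSize = 1000).transact(xa)
}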
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcosthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRow.scala index 92bfc4ff4..77159deb0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRow.scala @@ -13,6 +13,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -75,4 +76,26 @@ object ProductcosthistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductcosthistoryRow] = new Write[ProductcosthistoryRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.startdate, x.enddate, x.standardcost, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 2, a.enddate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.standardcost) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 2, a.enddate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.standardcost) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala index 9447f002c..8c552ec58 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala @@ -30,4 +30,7 @@ trait ProductdescriptionRepo { def update: UpdateBuilder[ProductdescriptionFields, ProductdescriptionRow] def update(row: ProductdescriptionRow): ConnectionIO[Boolean] def upsert(unsaved: ProductdescriptionRow): ConnectionIO[ProductdescriptionRow] + def upsertBatch(unsaved: List[ProductdescriptionRow]): Stream[ConnectionIO, ProductdescriptionRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdescriptionRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala index f5e5f931c..a7bf4a8cf 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala @@ -10,6 +10,7 @@ package productdescription import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -125,4 +127,33 @@ class ProductdescriptionRepoImpl extends ProductdescriptionRepo { returning "productdescriptionid", "description", "rowguid", "modifieddate"::text """.query(using ProductdescriptionRow.read).unique } + override def upsertBatch(unsaved: List[ProductdescriptionRow]): Stream[ConnectionIO, ProductdescriptionRow] = { + Update[ProductdescriptionRow]( + s"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + values (?::int4,?,?::uuid,?::timestamp) + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productdescriptionid", "description", "rowguid", "modifieddate"::text""" + )(using ProductdescriptionRow.write) + .updateManyWithGeneratedKeys[ProductdescriptionRow]("productdescriptionid", "description", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductdescriptionRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
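upsertBatch goes through one prepared statement (Update(...).updateManyWithGeneratedKeys), so the rows it streams back already reflect the `returning` clause, including the `::text` casts the generated Read instance expects. A minimal sketch of consuming it, assuming a `Transactor[IO]` named `xa` (not part of the generated code):

import adventureworks.production.productdescription.{ProductdescriptionRepoImpl, ProductdescriptionRow}
import cats.effect.IO
import doobie.implicits._
import doobie.util.transactor.Transactor

def upsertAll(xa: Transactor[IO], rows: List[ProductdescriptionRow]): IO[List[ProductdescriptionRow]] =
  new ProductdescriptionRepoImpl()
    .upsertBatch(rows) // Stream[ConnectionIO, ProductdescriptionRow], one element per input row
    .transact(xa)      // run the whole stream inside one transaction
    .compile
    .toList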
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdescriptionRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productdescription_TEMP (like production.productdescription) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productdescription_TEMP("productdescriptionid", "description", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductdescriptionRow.text) + res <- sql"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + select * from productdescription_TEMP + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdescription_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala index c91b85a7e..48b6ffbc4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala @@ -105,4 +105,23 @@ class ProductdescriptionRepoMock(toRow: Function1[ProductdescriptionRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[ProductdescriptionRow]): Stream[ConnectionIO, ProductdescriptionRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productdescriptionid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdescriptionRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productdescriptionid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRow.scala index d6592e181..7d9da3f26 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -63,4 +64,23 @@ object ProductdescriptionRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductdescriptionRow] = new Write[ProductdescriptionRow]( + puts = List((ProductdescriptionId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productdescriptionid, x.description, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductdescriptionId.put.unsafeSetNonNullable(rs, i + 0, a.productdescriptionid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.description) + TypoUUID.put.unsafeSetNonNullable(rs, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductdescriptionId.put.unsafeUpdateNonNullable(ps, i + 0, a.productdescriptionid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.description) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala index 415d1b343..b5f6b036d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala @@ -30,4 +30,7 @@ trait ProductdocumentRepo { def update: UpdateBuilder[ProductdocumentFields, ProductdocumentRow] def update(row: ProductdocumentRow): ConnectionIO[Boolean] def upsert(unsaved: ProductdocumentRow): ConnectionIO[ProductdocumentRow] + def upsertBatch(unsaved: List[ProductdocumentRow]): Stream[ConnectionIO, ProductdocumentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdocumentRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala index df2cda2f9..a0d4feeaa 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.document.DocumentId import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -130,4 +132,29 @@ class ProductdocumentRepoImpl extends ProductdocumentRepo { returning "productid", "modifieddate"::text, "documentnode" """.query(using ProductdocumentRow.read).unique } + override def upsertBatch(unsaved: List[ProductdocumentRow]): Stream[ConnectionIO, ProductdocumentRow] = { + Update[ProductdocumentRow]( + s"""insert into production.productdocument("productid", "modifieddate", "documentnode") + values (?::int4,?::timestamp,?) + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "modifieddate"::text, "documentnode"""" + )(using ProductdocumentRow.write) + .updateManyWithGeneratedKeys[ProductdocumentRow]("productid", "modifieddate", "documentnode")(unsaved)(using catsStdInstancesForList, ProductdocumentRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdocumentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productdocument_TEMP (like production.productdocument) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productdocument_TEMP("productid", "modifieddate", "documentnode") from stdin""").copyIn(unsaved, batchSize)(using ProductdocumentRow.text) + res <- sql"""insert into production.productdocument("productid", "modifieddate", "documentnode") + select * from productdocument_TEMP + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdocument_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala index 9d8fbc021..05006ff3f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala @@ -105,4 +105,23 @@ class ProductdocumentRepoMock(toRow: Function1[ProductdocumentRowUnsaved, Produc unsaved } } + override def upsertBatch(unsaved: List[ProductdocumentRow]): Stream[ConnectionIO, ProductdocumentRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdocumentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRow.scala index b81bec8c9..2f32ee36d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRow.scala @@ -14,6 +14,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -62,4 +63,20 @@ object ProductdocumentRow { sb.append(Text.DELIMETER) DocumentId.text.unsafeEncode(row.documentnode, sb) } + implicit lazy val write: Write[ProductdocumentRow] = new Write[ProductdocumentRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (DocumentId.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.modifieddate, x.documentnode), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.modifieddate) + DocumentId.put.unsafeSetNonNullable(rs, i + 2, a.documentnode) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, 
a.modifieddate) + DocumentId.put.unsafeUpdateNonNullable(ps, i + 2, a.documentnode) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala index 6500d9122..204cd2db9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala @@ -30,4 +30,7 @@ trait ProductinventoryRepo { def update: UpdateBuilder[ProductinventoryFields, ProductinventoryRow] def update(row: ProductinventoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductinventoryRow): ConnectionIO[ProductinventoryRow] + def upsertBatch(unsaved: List[ProductinventoryRow]): Stream[ConnectionIO, ProductinventoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductinventoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala index 72c2db629..1dd6a9c28 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.production.location.LocationId import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -20,6 +21,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -152,4 +154,37 @@ class ProductinventoryRepoImpl extends ProductinventoryRepo { returning "productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate"::text """.query(using ProductinventoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductinventoryRow]): Stream[ConnectionIO, ProductinventoryRow] = { + Update[ProductinventoryRow]( + s"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + values (?::int4,?::int2,?,?::int2,?::int2,?::uuid,?::timestamp) + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate"::text""" + )(using ProductinventoryRow.write) + .updateManyWithGeneratedKeys[ProductinventoryRow]("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductinventoryRow.read) + } + /* NOTE: this functionality 
is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductinventoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productinventory_TEMP (like production.productinventory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productinventory_TEMP("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductinventoryRow.text) + res <- sql"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + select * from productinventory_TEMP + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productinventory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala index 9c06fc15d..5ba22cada 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductinventoryRepoMock(toRow: Function1[ProductinventoryRowUnsaved, Prod unsaved } } + override def upsertBatch(unsaved: List[ProductinventoryRow]): Stream[ConnectionIO, ProductinventoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
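Each generated Write instance lists its column Puts in the same order as the `?` placeholders in upsertBatch, which is what lets the whole row be handed to doobie's Update. The same instance can be reused for a plain insert; a sketch under that assumption (the SQL is copied from the upsert above, minus the conflict clause):

import adventureworks.production.productinventory.ProductinventoryRow
import doobie.free.connection.ConnectionIO
import doobie.util.update.Update

// The seven '?' placeholders are bound positionally from ProductinventoryRow.write.toList:
// productid, locationid, shelf, bin, quantity, rowguid, modifieddate.
def insertOne(row: ProductinventoryRow): ConnectionIO[Int] =
  Update[ProductinventoryRow](
    """insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate")
       values (?::int4,?::int2,?,?::int2,?::int2,?::uuid,?::timestamp)"""
  )(using ProductinventoryRow.write).run(row)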
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductinventoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRow.scala index 193540dcf..10487a5ef 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRow.scala @@ -16,6 +16,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -90,4 +91,32 @@ object ProductinventoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductinventoryRow] = new Write[ProductinventoryRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (LocationId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.locationid, x.shelf, x.bin, x.quantity, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + LocationId.put.unsafeSetNonNullable(rs, i + 1, a.locationid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.shelf) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.bin) + TypoShort.put.unsafeSetNonNullable(rs, i + 4, a.quantity) + TypoUUID.put.unsafeSetNonNullable(rs, i + 5, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + LocationId.put.unsafeUpdateNonNullable(ps, i + 1, a.locationid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.shelf) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.bin) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 4, a.quantity) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 5, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala index c0ae6041a..ff5fa80fc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala @@ -30,4 +30,7 @@ trait ProductlistpricehistoryRepo { def update: UpdateBuilder[ProductlistpricehistoryFields, ProductlistpricehistoryRow] def update(row: ProductlistpricehistoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductlistpricehistoryRow): ConnectionIO[ProductlistpricehistoryRow] + def 
upsertBatch(unsaved: List[ProductlistpricehistoryRow]): Stream[ConnectionIO, ProductlistpricehistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductlistpricehistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala index 8db559386..e493e1683 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala @@ -10,6 +10,7 @@ package productlistpricehistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -135,4 +137,33 @@ class ProductlistpricehistoryRepoImpl extends ProductlistpricehistoryRepo { returning "productid", "startdate"::text, "enddate"::text, "listprice", "modifieddate"::text """.query(using ProductlistpricehistoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductlistpricehistoryRow]): Stream[ConnectionIO, ProductlistpricehistoryRow] = { + Update[ProductlistpricehistoryRow]( + s"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + values (?::int4,?::timestamp,?::timestamp,?::numeric,?::timestamp) + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "startdate"::text, "enddate"::text, "listprice", "modifieddate"::text""" + )(using ProductlistpricehistoryRow.write) + .updateManyWithGeneratedKeys[ProductlistpricehistoryRow]("productid", "startdate", "enddate", "listprice", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductlistpricehistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductlistpricehistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productlistpricehistory_TEMP (like production.productlistpricehistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productlistpricehistory_TEMP("productid", "startdate", "enddate", "listprice", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductlistpricehistoryRow.text) + res <- sql"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + select * from productlistpricehistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productlistpricehistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala index d56124d38..b82e79076 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductlistpricehistoryRepoMock(toRow: Function1[ProductlistpricehistoryRo unsaved } } + override def upsertBatch(unsaved: List[ProductlistpricehistoryRow]): Stream[ConnectionIO, ProductlistpricehistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductlistpricehistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRow.scala index e8061f6d4..55ad132ac 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRow.scala @@ -13,6 +13,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -75,4 +76,26 @@ object ProductlistpricehistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductlistpricehistoryRow] = new Write[ProductlistpricehistoryRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.startdate, x.enddate, x.listprice, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 2, a.enddate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.listprice) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 2, a.enddate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.listprice) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala index 5b87a5c97..f5f9262ca 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala @@ -30,4 +30,7 @@ trait ProductmodelRepo { def update: UpdateBuilder[ProductmodelFields, ProductmodelRow] def update(row: ProductmodelRow): ConnectionIO[Boolean] def upsert(unsaved: ProductmodelRow): ConnectionIO[ProductmodelRow] + def upsertBatch(unsaved: List[ProductmodelRow]): Stream[ConnectionIO, ProductmodelRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala index ffce7940f..f5588b958 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.customtypes.TypoXml import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -134,4 +136,37 @@ class ProductmodelRepoImpl extends ProductmodelRepo { returning "productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate"::text """.query(using ProductmodelRow.read).unique } + override def upsertBatch(unsaved: List[ProductmodelRow]): Stream[ConnectionIO, ProductmodelRow] = { + Update[ProductmodelRow]( + s"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::xml,?::xml,?::uuid,?::timestamp) + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate"::text""" + )(using ProductmodelRow.write) + .updateManyWithGeneratedKeys[ProductmodelRow]("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductmodelRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productmodel_TEMP (like production.productmodel) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productmodel_TEMP("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductmodelRow.text) + res <- sql"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + select * from productmodel_TEMP + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodel_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala index f27941dc8..ceba04f8f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala @@ -105,4 +105,23 @@ class ProductmodelRepoMock(toRow: Function1[ProductmodelRowUnsaved, Productmodel unsaved } } + override def upsertBatch(unsaved: List[ProductmodelRow]): Stream[ConnectionIO, ProductmodelRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productmodelid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
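The two upsert variants trade differently: upsertBatch is a single JDBC-batched statement that streams the persisted rows back, while upsertStreaming COPYs into a temp table and reports only a row count, which scales better for bulk loads. A sketch of choosing between them, assuming `xa: Transactor[IO]`; the 10000 cut-off is arbitrary and simply mirrors the default batchSize:

import adventureworks.production.productmodel.{ProductmodelRepoImpl, ProductmodelRow}
import cats.effect.IO
import doobie.free.connection.ConnectionIO
import doobie.implicits._
import doobie.util.transactor.Transactor
import fs2.Stream

def syncProductModels(xa: Transactor[IO], models: List[ProductmodelRow]): IO[Int] = {
  val repo = new ProductmodelRepoImpl
  if (models.sizeIs < 10000)
    repo.upsertBatch(models).transact(xa).compile.toList.map(_.size) // returned rows -> count
  else
    repo.upsertStreaming(Stream.emits(models).covary[ConnectionIO]).transact(xa) // COPY path, count only
}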
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productmodelid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRow.scala index 6efd3c512..fcf9fc93e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -76,4 +77,29 @@ object ProductmodelRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductmodelRow] = new Write[ProductmodelRow]( + puts = List((ProductmodelId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoXml.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productmodelid, x.name, x.catalogdescription, x.instructions, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductmodelId.put.unsafeSetNonNullable(rs, i + 0, a.productmodelid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoXml.put.unsafeSetNullable(rs, i + 2, a.catalogdescription) + TypoXml.put.unsafeSetNullable(rs, i + 3, a.instructions) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductmodelId.put.unsafeUpdateNonNullable(ps, i + 0, a.productmodelid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoXml.put.unsafeUpdateNullable(ps, i + 2, a.catalogdescription) + TypoXml.put.unsafeUpdateNullable(ps, i + 3, a.instructions) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala index eacb7fec2..0be204b67 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala @@ -30,4 +30,7 @@ trait ProductmodelillustrationRepo { def update: UpdateBuilder[ProductmodelillustrationFields, ProductmodelillustrationRow] def update(row: ProductmodelillustrationRow): ConnectionIO[Boolean] def upsert(unsaved: ProductmodelillustrationRow): ConnectionIO[ProductmodelillustrationRow] + def upsertBatch(unsaved: List[ProductmodelillustrationRow]): Stream[ConnectionIO, ProductmodelillustrationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelillustrationRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala index e70731460..2a5343b67 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.illustration.IllustrationId import adventureworks.production.productmodel.ProductmodelId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -127,4 +129,29 @@ class ProductmodelillustrationRepoImpl extends ProductmodelillustrationRepo { returning "productmodelid", "illustrationid", "modifieddate"::text """.query(using ProductmodelillustrationRow.read).unique } + override def upsertBatch(unsaved: List[ProductmodelillustrationRow]): Stream[ConnectionIO, ProductmodelillustrationRow] = { + Update[ProductmodelillustrationRow]( + s"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + values (?::int4,?::int4,?::timestamp) + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "illustrationid", "modifieddate"::text""" + )(using ProductmodelillustrationRow.write) + .updateManyWithGeneratedKeys[ProductmodelillustrationRow]("productmodelid", "illustrationid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductmodelillustrationRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelillustrationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productmodelillustration_TEMP (like production.productmodelillustration) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productmodelillustration_TEMP("productmodelid", "illustrationid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductmodelillustrationRow.text) + res <- sql"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + select * from productmodelillustration_TEMP + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelillustration_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala index 4454e0bfd..baafb37cc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala @@ -105,4 +105,23 @@ class ProductmodelillustrationRepoMock(toRow: Function1[Productmodelillustration unsaved } } + override def upsertBatch(unsaved: List[ProductmodelillustrationRow]): Stream[ConnectionIO, ProductmodelillustrationRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelillustrationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRow.scala index 882805de7..01b9fc10f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRow.scala @@ -14,6 +14,7 @@ import adventureworks.production.productmodel.ProductmodelId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -61,4 +62,20 @@ object ProductmodelillustrationRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductmodelillustrationRow] = new Write[ProductmodelillustrationRow]( + puts = List((ProductmodelId.put, Nullability.NoNulls), + (IllustrationId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productmodelid, x.illustrationid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductmodelId.put.unsafeSetNonNullable(rs, i + 0, a.productmodelid) + IllustrationId.put.unsafeSetNonNullable(rs, i + 1, a.illustrationid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductmodelId.put.unsafeUpdateNonNullable(ps, i + 0, a.productmodelid) + IllustrationId.put.unsafeUpdateNonNullable(ps, i + 1, a.illustrationid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala index 49c91cc70..2cb61f47f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala @@ -30,4 +30,7 @@ trait ProductmodelproductdescriptioncultureRepo { def update: UpdateBuilder[ProductmodelproductdescriptioncultureFields, ProductmodelproductdescriptioncultureRow] def update(row: ProductmodelproductdescriptioncultureRow): ConnectionIO[Boolean] def upsert(unsaved: ProductmodelproductdescriptioncultureRow): ConnectionIO[ProductmodelproductdescriptioncultureRow] + def upsertBatch(unsaved: List[ProductmodelproductdescriptioncultureRow]): Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala index 943dbfa15..0a44aaed4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.culture.CultureId import adventureworks.production.productdescription.ProductdescriptionId import adventureworks.production.productmodel.ProductmodelId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,29 @@ class ProductmodelproductdescriptioncultureRepoImpl extends Productmodelproductd returning "productmodelid", "productdescriptionid", "cultureid", "modifieddate"::text """.query(using ProductmodelproductdescriptioncultureRow.read).unique } + override def upsertBatch(unsaved: List[ProductmodelproductdescriptioncultureRow]): Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow] = { + Update[ProductmodelproductdescriptioncultureRow]( + s"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + values (?::int4,?::int4,?::bpchar,?::timestamp) + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "productdescriptionid", "cultureid", "modifieddate"::text""" + )(using ProductmodelproductdescriptioncultureRow.write) + .updateManyWithGeneratedKeys[ProductmodelproductdescriptioncultureRow]("productmodelid", "productdescriptionid", "cultureid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductmodelproductdescriptioncultureRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productmodelproductdescriptionculture_TEMP (like production.productmodelproductdescriptionculture) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productmodelproductdescriptionculture_TEMP("productmodelid", "productdescriptionid", "cultureid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductmodelproductdescriptioncultureRow.text) + res <- sql"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + select * from productmodelproductdescriptionculture_TEMP + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelproductdescriptionculture_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala index 6774728e3..81b962953 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala @@ -105,4 +105,23 @@ class ProductmodelproductdescriptioncultureRepoMock(toRow: Function1[Productmode unsaved } } + override def upsertBatch(unsaved: List[ProductmodelproductdescriptioncultureRow]): Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRow.scala index 61ae5c5b9..a8b9b8679 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.productmodel.ProductmodelId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -69,4 +70,23 @@ object ProductmodelproductdescriptioncultureRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductmodelproductdescriptioncultureRow] = new Write[ProductmodelproductdescriptioncultureRow]( + puts = List((ProductmodelId.put, Nullability.NoNulls), + (ProductdescriptionId.put, Nullability.NoNulls), + (CultureId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productmodelid, x.productdescriptionid, x.cultureid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductmodelId.put.unsafeSetNonNullable(rs, i + 0, a.productmodelid) + ProductdescriptionId.put.unsafeSetNonNullable(rs, i + 1, a.productdescriptionid) + CultureId.put.unsafeSetNonNullable(rs, i + 2, a.cultureid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductmodelId.put.unsafeUpdateNonNullable(ps, i + 0, a.productmodelid) + ProductdescriptionId.put.unsafeUpdateNonNullable(ps, i + 1, a.productdescriptionid) + CultureId.put.unsafeUpdateNonNullable(ps, i + 2, a.cultureid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala index eb2dd313d..fc4939e05 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala @@ -30,4 +30,7 @@ trait ProductphotoRepo { def update: UpdateBuilder[ProductphotoFields, ProductphotoRow] def update(row: ProductphotoRow): ConnectionIO[Boolean] def upsert(unsaved: ProductphotoRow): ConnectionIO[ProductphotoRow] + def upsertBatch(unsaved: List[ProductphotoRow]): Stream[ConnectionIO, ProductphotoRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala index fb10e15b0..5e8a6fa6e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala @@ -10,6 +10,7 @@ package productphoto import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoBytea import adventureworks.customtypes.TypoLocalDateTime +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -130,4 +132,37 @@ class ProductphotoRepoImpl extends ProductphotoRepo { returning "productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate"::text """.query(using ProductphotoRow.read).unique } + override def upsertBatch(unsaved: List[ProductphotoRow]): Stream[ConnectionIO, ProductphotoRow] = { + Update[ProductphotoRow]( + s"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + values (?::int4,?::bytea,?,?::bytea,?,?::timestamp) + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + returning "productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate"::text""" + )(using ProductphotoRow.write) + .updateManyWithGeneratedKeys[ProductphotoRow]("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductphotoRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productphoto_TEMP (like production.productphoto) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productphoto_TEMP("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductphotoRow.text) + res <- sql"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + select * from productphoto_TEMP + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productphoto_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala index 5c1f27851..e055c7921 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala @@ -105,4 +105,23 @@ class ProductphotoRepoMock(toRow: Function1[ProductphotoRowUnsaved, Productphoto unsaved } } + override def upsertBatch(unsaved: List[ProductphotoRow]): Stream[ConnectionIO, ProductphotoRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productphotoid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
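The generated upsertStreaming above stages rows through a temporary table with COPY and then upserts from it, which is exactly why the NOTE insists on running it inside a single transaction. A minimal usage sketch, assuming a cats-effect IO-based Transactor named xa (xa and bulkUpsertPhotos are illustrative and not part of this diff):

// usage sketch, not generated code: run the whole ConnectionIO in one transaction,
// since upsertStreaming is unsafe under auto-commit
import adventureworks.production.productphoto.{ProductphotoRepoImpl, ProductphotoRow}
import cats.effect.IO
import doobie.Transactor
import doobie.free.connection.ConnectionIO
import doobie.implicits._
import fs2.Stream

def bulkUpsertPhotos(xa: Transactor[IO], rows: List[ProductphotoRow]): IO[Int] =
  new ProductphotoRepoImpl()
    .upsertStreaming(Stream.emits(rows).covary[ConnectionIO], batchSize = 1000)
    .transact(xa) // temp table, COPY and upsert all commit (or roll back) together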
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productphotoid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRow.scala index f15fce481..5b00c0504 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoLocalDateTime import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -75,4 +76,29 @@ object ProductphotoRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductphotoRow] = new Write[ProductphotoRow]( + puts = List((ProductphotoId.put, Nullability.NoNulls), + (TypoBytea.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoBytea.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productphotoid, x.thumbnailphoto, x.thumbnailphotofilename, x.largephoto, x.largephotofilename, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductphotoId.put.unsafeSetNonNullable(rs, i + 0, a.productphotoid) + TypoBytea.put.unsafeSetNullable(rs, i + 1, a.thumbnailphoto) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.thumbnailphotofilename) + TypoBytea.put.unsafeSetNullable(rs, i + 3, a.largephoto) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.largephotofilename) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductphotoId.put.unsafeUpdateNonNullable(ps, i + 0, a.productphotoid) + TypoBytea.put.unsafeUpdateNullable(ps, i + 1, a.thumbnailphoto) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.thumbnailphotofilename) + TypoBytea.put.unsafeUpdateNullable(ps, i + 3, a.largephoto) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.largephotofilename) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala index b57a106ec..8df69d710 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala @@ -30,4 +30,7 @@ trait ProductproductphotoRepo { def update: UpdateBuilder[ProductproductphotoFields, ProductproductphotoRow] def update(row: ProductproductphotoRow): ConnectionIO[Boolean] def upsert(unsaved: ProductproductphotoRow): ConnectionIO[ProductproductphotoRow] + def upsertBatch(unsaved: List[ProductproductphotoRow]): Stream[ConnectionIO, ProductproductphotoRow] + /* NOTE: this functionality is not safe 
if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductproductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala index 57a57afaf..bd1e2db8f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId import adventureworks.production.productphoto.ProductphotoId import adventureworks.public.Flag +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -135,4 +137,31 @@ class ProductproductphotoRepoImpl extends ProductproductphotoRepo { returning "productid", "productphotoid", "primary", "modifieddate"::text """.query(using ProductproductphotoRow.read).unique } + override def upsertBatch(unsaved: List[ProductproductphotoRow]): Stream[ConnectionIO, ProductproductphotoRow] = { + Update[ProductproductphotoRow]( + s"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + values (?::int4,?::int4,?::bool,?::timestamp) + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "productphotoid", "primary", "modifieddate"::text""" + )(using ProductproductphotoRow.write) + .updateManyWithGeneratedKeys[ProductproductphotoRow]("productid", "productphotoid", "primary", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductproductphotoRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductproductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productproductphoto_TEMP (like production.productproductphoto) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productproductphoto_TEMP("productid", "productphotoid", "primary", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductproductphotoRow.text) + res <- sql"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + select * from productproductphoto_TEMP + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productproductphoto_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala index d16a4b9bb..f383f4b2c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala @@ -105,4 +105,23 @@ class ProductproductphotoRepoMock(toRow: Function1[ProductproductphotoRowUnsaved unsaved } } + override def upsertBatch(unsaved: List[ProductproductphotoRow]): Stream[ConnectionIO, ProductproductphotoRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductproductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRow.scala index 682bb79a8..9d03a28f2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Flag import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -69,4 +70,23 @@ object ProductproductphotoRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductproductphotoRow] = new Write[ProductproductphotoRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (ProductphotoId.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.productphotoid, x.primary, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + ProductphotoId.put.unsafeSetNonNullable(rs, i + 1, a.productphotoid) + Flag.put.unsafeSetNonNullable(rs, i + 2, a.primary) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + ProductphotoId.put.unsafeUpdateNonNullable(ps, i + 1, a.productphotoid) + Flag.put.unsafeUpdateNonNullable(ps, i + 2, a.primary) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala index 2fc839da1..605352384 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala @@ -30,4 +30,7 @@ trait ProductreviewRepo { def update: UpdateBuilder[ProductreviewFields, ProductreviewRow] def update(row: ProductreviewRow): ConnectionIO[Boolean] def upsert(unsaved: ProductreviewRow): ConnectionIO[ProductreviewRow] + def upsertBatch(unsaved: List[ProductreviewRow]): Stream[ConnectionIO, ProductreviewRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductreviewRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala index 9bb72aa51..e773add32 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -142,4 +144,41 @@ class ProductreviewRepoImpl extends ProductreviewRepo { returning "productreviewid", "productid", "reviewername", "reviewdate"::text, "emailaddress", "rating", "comments", "modifieddate"::text """.query(using ProductreviewRow.read).unique } + override def upsertBatch(unsaved: List[ProductreviewRow]): Stream[ConnectionIO, ProductreviewRow] = { + Update[ProductreviewRow]( + s"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + values (?::int4,?::int4,?::varchar,?::timestamp,?,?::int4,?,?::timestamp) + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + returning "productreviewid", "productid", "reviewername", "reviewdate"::text, "emailaddress", "rating", "comments", "modifieddate"::text""" + )(using ProductreviewRow.write) + .updateManyWithGeneratedKeys[ProductreviewRow]("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductreviewRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductreviewRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productreview_TEMP (like production.productreview) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productreview_TEMP("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductreviewRow.text) + res <- sql"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + select * from productreview_TEMP + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productreview_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala index 46a33caf8..54780e129 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala @@ -105,4 +105,23 @@ class ProductreviewRepoMock(toRow: Function1[ProductreviewRowUnsaved, Productrev unsaved } } + override def upsertBatch(unsaved: List[ProductreviewRow]): Stream[ConnectionIO, ProductreviewRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productreviewid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductreviewRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productreviewid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRow.scala index 990385733..0fb4a1a39 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -91,4 +92,35 @@ object ProductreviewRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductreviewRow] = new Write[ProductreviewRow]( + puts = List((ProductreviewId.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productreviewid, x.productid, x.reviewername, x.reviewdate, x.emailaddress, x.rating, x.comments, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductreviewId.put.unsafeSetNonNullable(rs, i + 0, a.productreviewid) + ProductId.put.unsafeSetNonNullable(rs, i + 1, a.productid) + Name.put.unsafeSetNonNullable(rs, i + 2, a.reviewername) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.reviewdate) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 4, a.emailaddress) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 5, a.rating) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 6, a.comments) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 7, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductreviewId.put.unsafeUpdateNonNullable(ps, i + 0, a.productreviewid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + Name.put.unsafeUpdateNonNullable(ps, i + 2, a.reviewername) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.reviewdate) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.emailaddress) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.rating) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 6, a.comments) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 7, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala index c5381c27a..4227314ff 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala @@ -30,4 +30,7 @@ trait ProductsubcategoryRepo { def update: UpdateBuilder[ProductsubcategoryFields, 
ProductsubcategoryRow] def update(row: ProductsubcategoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductsubcategoryRow): ConnectionIO[ProductsubcategoryRow] + def upsertBatch(unsaved: List[ProductsubcategoryRow]): Stream[ConnectionIO, ProductsubcategoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductsubcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala index 895d5a4b9..d54dc753a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.production.productcategory.ProductcategoryId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -130,4 +132,35 @@ class ProductsubcategoryRepoImpl extends ProductsubcategoryRepo { returning "productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate"::text """.query(using ProductsubcategoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductsubcategoryRow]): Stream[ConnectionIO, ProductsubcategoryRow] = { + Update[ProductsubcategoryRow]( + s"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + values (?::int4,?::int4,?::varchar,?::uuid,?::timestamp) + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate"::text""" + )(using ProductsubcategoryRow.write) + .updateManyWithGeneratedKeys[ProductsubcategoryRow]("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductsubcategoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductsubcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productsubcategory_TEMP (like production.productsubcategory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productsubcategory_TEMP("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductsubcategoryRow.text) + res <- sql"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + select * from productsubcategory_TEMP + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productsubcategory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala index d3bf0c605..2718ff9f0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductsubcategoryRepoMock(toRow: Function1[ProductsubcategoryRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[ProductsubcategoryRow]): Stream[ConnectionIO, ProductsubcategoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productsubcategoryid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductsubcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productsubcategoryid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRow.scala index 73c98d42f..ad0981952 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -71,4 +72,26 @@ object ProductsubcategoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductsubcategoryRow] = new Write[ProductsubcategoryRow]( + puts = List((ProductsubcategoryId.put, Nullability.NoNulls), + (ProductcategoryId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productsubcategoryid, x.productcategoryid, x.name, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductsubcategoryId.put.unsafeSetNonNullable(rs, i + 0, a.productsubcategoryid) + ProductcategoryId.put.unsafeSetNonNullable(rs, i + 1, a.productcategoryid) + Name.put.unsafeSetNonNullable(rs, i + 2, a.name) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductsubcategoryId.put.unsafeUpdateNonNullable(ps, i + 0, a.productsubcategoryid) + ProductcategoryId.put.unsafeUpdateNonNullable(ps, i + 1, a.productcategoryid) + Name.put.unsafeUpdateNonNullable(ps, i + 2, a.name) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala index 63e63855f..b9589f1f7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala @@ -30,4 +30,7 @@ trait ScrapreasonRepo { def update: UpdateBuilder[ScrapreasonFields, ScrapreasonRow] def update(row: ScrapreasonRow): ConnectionIO[Boolean] def upsert(unsaved: ScrapreasonRow): ConnectionIO[ScrapreasonRow] + def upsertBatch(unsaved: List[ScrapreasonRow]): Stream[ConnectionIO, ScrapreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ScrapreasonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala index 08dd8d1f5..23015215e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala @@ -10,12 +10,14 @@ package scrapreason import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -117,4 +119,31 @@ class ScrapreasonRepoImpl extends ScrapreasonRepo { returning "scrapreasonid", "name", "modifieddate"::text """.query(using ScrapreasonRow.read).unique } + override def upsertBatch(unsaved: List[ScrapreasonRow]): Stream[ConnectionIO, ScrapreasonRow] = { + Update[ScrapreasonRow]( + s"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + values (?::int4,?::varchar,?::timestamp) + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "scrapreasonid", "name", "modifieddate"::text""" + )(using ScrapreasonRow.write) + .updateManyWithGeneratedKeys[ScrapreasonRow]("scrapreasonid", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, ScrapreasonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
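upsertBatch, by contrast, is generated to return the upserted rows as a Stream[ConnectionIO, ScrapreasonRow] backed by updateManyWithGeneratedKeys, so a caller typically compiles it back to a list. A sketch under the same assumption of an IO-based transactor xa (illustrative only, not part of this diff):

// usage sketch, not generated code: collect the rows echoed back by upsertBatch
import adventureworks.production.scrapreason.{ScrapreasonRepoImpl, ScrapreasonRow}
import cats.effect.IO
import doobie.Transactor
import doobie.implicits._

def upsertScrapReasons(xa: Transactor[IO], rows: List[ScrapreasonRow]): IO[List[ScrapreasonRow]] =
  new ScrapreasonRepoImpl()
    .upsertBatch(rows) // one prepared statement, executed as a single JDBC batch
    .compile
    .toList
    .transact(xa)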
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ScrapreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table scrapreason_TEMP (like production.scrapreason) on commit drop".update.run + _ <- new FragmentOps(sql"""copy scrapreason_TEMP("scrapreasonid", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ScrapreasonRow.text) + res <- sql"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + select * from scrapreason_TEMP + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table scrapreason_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala index 327f41aae..3576e6fe0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala @@ -105,4 +105,23 @@ class ScrapreasonRepoMock(toRow: Function1[ScrapreasonRowUnsaved, ScrapreasonRow unsaved } } + override def upsertBatch(unsaved: List[ScrapreasonRow]): Stream[ConnectionIO, ScrapreasonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.scrapreasonid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ScrapreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.scrapreasonid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRow.scala index bed531300..f8af862fc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object ScrapreasonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ScrapreasonRow] = new Write[ScrapreasonRow]( + puts = List((ScrapreasonId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.scrapreasonid, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + ScrapreasonId.put.unsafeSetNonNullable(rs, i + 0, a.scrapreasonid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ScrapreasonId.put.unsafeUpdateNonNullable(ps, i + 0, a.scrapreasonid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git 
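The implicit Write[ScrapreasonRow] added above is what lets doobie's Update accept the generated row type; upsertBatch relies on it, and the same instance can also drive an ordinary batch insert. A sketch mirroring the insert statement from upsertBatch (nothing below is generated code; insertScrapreason and insertAll are illustrative names):

// sketch, not generated code: the new Write instance backing a plain JDBC batch insert
import adventureworks.production.scrapreason.ScrapreasonRow
import doobie.free.connection.ConnectionIO
import doobie.util.update.Update

val insertScrapreason: Update[ScrapreasonRow] =
  Update[ScrapreasonRow](
    """insert into production.scrapreason("scrapreasonid", "name", "modifieddate")
      |values (?::int4, ?::varchar, ?::timestamp)""".stripMargin
  )(using ScrapreasonRow.write)

def insertAll(rows: List[ScrapreasonRow]): ConnectionIO[Int] =
  insertScrapreason.updateMany(rows) // Foldable[List] is supplied by cats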
a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala index 1a2dd6821..22c9f5fd7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala @@ -30,4 +30,7 @@ trait TransactionhistoryRepo { def update: UpdateBuilder[TransactionhistoryFields, TransactionhistoryRow] def update(row: TransactionhistoryRow): ConnectionIO[Boolean] def upsert(unsaved: TransactionhistoryRow): ConnectionIO[TransactionhistoryRow] + def upsertBatch(unsaved: List[TransactionhistoryRow]): Stream[ConnectionIO, TransactionhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala index 83252db02..ac1291791 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala @@ -10,6 +10,7 @@ package transactionhistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -148,4 +150,43 @@ class TransactionhistoryRepoImpl extends TransactionhistoryRepo { returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text """.query(using TransactionhistoryRow.read).unique } + override def upsertBatch(unsaved: List[TransactionhistoryRow]): Stream[ConnectionIO, TransactionhistoryRow] = { + Update[TransactionhistoryRow]( + s"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + values (?::int4,?::int4,?::int4,?::int4,?::timestamp,?::bpchar,?::int4,?::numeric,?::timestamp) + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "transactionid", "productid", 
"referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text""" + )(using TransactionhistoryRow.write) + .updateManyWithGeneratedKeys[TransactionhistoryRow]("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate")(unsaved)(using catsStdInstancesForList, TransactionhistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table transactionhistory_TEMP (like production.transactionhistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy transactionhistory_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using TransactionhistoryRow.text) + res <- sql"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistory_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala index ed72c28af..7e426383e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala @@ -105,4 +105,23 @@ class TransactionhistoryRepoMock(toRow: Function1[TransactionhistoryRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[TransactionhistoryRow]): Stream[ConnectionIO, TransactionhistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.transactionid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.transactionid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRow.scala index c33f24961..6fab4ee6b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRow.scala @@ -13,6 +13,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -97,4 +98,38 @@ object TransactionhistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[TransactionhistoryRow] = new Write[TransactionhistoryRow]( + puts = List((TransactionhistoryId.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.transactionid, x.productid, x.referenceorderid, x.referenceorderlineid, x.transactiondate, x.transactiontype, x.quantity, x.actualcost, x.modifieddate), + unsafeSet = (rs, i, a) => { + TransactionhistoryId.put.unsafeSetNonNullable(rs, i + 0, a.transactionid) + ProductId.put.unsafeSetNonNullable(rs, i + 1, a.productid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.referenceorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 3, a.referenceorderlineid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.transactiondate) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.transactiontype) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 6, a.quantity) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.actualcost) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + TransactionhistoryId.put.unsafeUpdateNonNullable(ps, i + 0, a.transactionid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.referenceorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.referenceorderlineid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.transactiondate) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.transactiontype) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.quantity) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.actualcost) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala index 13eac4daa..fdb69f9ef 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala @@ -30,4 +30,7 @@ trait TransactionhistoryarchiveRepo { def update: UpdateBuilder[TransactionhistoryarchiveFields, TransactionhistoryarchiveRow] def update(row: TransactionhistoryarchiveRow): ConnectionIO[Boolean] def upsert(unsaved: TransactionhistoryarchiveRow): ConnectionIO[TransactionhistoryarchiveRow] + def upsertBatch(unsaved: List[TransactionhistoryarchiveRow]): Stream[ConnectionIO, TransactionhistoryarchiveRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryarchiveRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala index c79c17c4f..5b67db216 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala @@ -9,6 +9,7 @@ package transactionhistoryarchive import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -16,6 +17,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -144,4 +146,43 @@ class TransactionhistoryarchiveRepoImpl extends TransactionhistoryarchiveRepo { returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text """.query(using TransactionhistoryarchiveRow.read).unique } + override def upsertBatch(unsaved: List[TransactionhistoryarchiveRow]): Stream[ConnectionIO, TransactionhistoryarchiveRow] = { + Update[TransactionhistoryarchiveRow]( + s"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + values (?::int4,?::int4,?::int4,?::int4,?::timestamp,?::bpchar,?::int4,?::numeric,?::timestamp) + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning 
"transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text""" + )(using TransactionhistoryarchiveRow.write) + .updateManyWithGeneratedKeys[TransactionhistoryarchiveRow]("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate")(unsaved)(using catsStdInstancesForList, TransactionhistoryarchiveRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryarchiveRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table transactionhistoryarchive_TEMP (like production.transactionhistoryarchive) on commit drop".update.run + _ <- new FragmentOps(sql"""copy transactionhistoryarchive_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using TransactionhistoryarchiveRow.text) + res <- sql"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistoryarchive_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistoryarchive_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala index 3114db348..fda35a872 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala @@ -105,4 +105,23 @@ class TransactionhistoryarchiveRepoMock(toRow: Function1[Transactionhistoryarchi unsaved } } + override def upsertBatch(unsaved: List[TransactionhistoryarchiveRow]): Stream[ConnectionIO, TransactionhistoryarchiveRow] = { + Stream.emits { + unsaved.map { row => + map += (row.transactionid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryarchiveRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.transactionid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRow.scala index a468b213a..8c7c8a74c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRow.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -94,4 +95,38 @@ object TransactionhistoryarchiveRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[TransactionhistoryarchiveRow] = new Write[TransactionhistoryarchiveRow]( + puts = List((TransactionhistoryarchiveId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.transactionid, x.productid, x.referenceorderid, x.referenceorderlineid, x.transactiondate, x.transactiontype, x.quantity, x.actualcost, x.modifieddate), + unsafeSet = (rs, i, a) => { + TransactionhistoryarchiveId.put.unsafeSetNonNullable(rs, i + 0, a.transactionid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.productid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.referenceorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 3, a.referenceorderlineid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.transactiondate) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.transactiontype) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 6, a.quantity) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.actualcost) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + TransactionhistoryarchiveId.put.unsafeUpdateNonNullable(ps, i + 0, a.transactionid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.referenceorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.referenceorderlineid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.transactiondate) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.transactiontype) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.quantity) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.actualcost) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala index fc98c1f28..01c49bbe2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala @@ -30,4 +30,7 @@ trait UnitmeasureRepo { def update: UpdateBuilder[UnitmeasureFields, UnitmeasureRow] def update(row: UnitmeasureRow): ConnectionIO[Boolean] def upsert(unsaved: UnitmeasureRow): ConnectionIO[UnitmeasureRow] + def upsertBatch(unsaved: List[UnitmeasureRow]): Stream[ConnectionIO, UnitmeasureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, UnitmeasureRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala index 4ac62d7c7..92f8198c2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala @@ -10,12 +10,14 @@ package unitmeasure import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -114,4 +116,31 @@ class UnitmeasureRepoImpl extends UnitmeasureRepo { returning "unitmeasurecode", "name", "modifieddate"::text """.query(using UnitmeasureRow.read).unique } + override def upsertBatch(unsaved: List[UnitmeasureRow]): Stream[ConnectionIO, UnitmeasureRow] = { + Update[UnitmeasureRow]( + s"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + values (?::bpchar,?::varchar,?::timestamp) + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "unitmeasurecode", "name", "modifieddate"::text""" + )(using UnitmeasureRow.write) + .updateManyWithGeneratedKeys[UnitmeasureRow]("unitmeasurecode", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, UnitmeasureRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, UnitmeasureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table unitmeasure_TEMP (like production.unitmeasure) on commit drop".update.run + _ <- new FragmentOps(sql"""copy unitmeasure_TEMP("unitmeasurecode", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using UnitmeasureRow.text) + res <- sql"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + select * from unitmeasure_TEMP + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table unitmeasure_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala index 9d6ff9e62..6bbf56384 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala @@ -105,4 +105,23 @@ class UnitmeasureRepoMock(toRow: Function1[UnitmeasureRowUnsaved, UnitmeasureRow unsaved } } + override def upsertBatch(unsaved: List[UnitmeasureRow]): Stream[ConnectionIO, UnitmeasureRow] = { + Stream.emits { + unsaved.map { row => + map += (row.unitmeasurecode -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, UnitmeasureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.unitmeasurecode -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRow.scala index 334faa1a8..34a631e61 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -55,4 +56,20 @@ object UnitmeasureRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[UnitmeasureRow] = new Write[UnitmeasureRow]( + puts = List((UnitmeasureId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.unitmeasurecode, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + UnitmeasureId.put.unsafeSetNonNullable(rs, i + 0, a.unitmeasurecode) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + UnitmeasureId.put.unsafeUpdateNonNullable(ps, i + 0, a.unitmeasurecode) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala index 1a92eb4ba..ac6b353b0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala @@ -30,4 +30,7 @@ trait WorkorderRepo { def update: UpdateBuilder[WorkorderFields, WorkorderRow] def update(row: WorkorderRow): ConnectionIO[Boolean] def upsert(unsaved: WorkorderRow): ConnectionIO[WorkorderRow] + def upsertBatch(unsaved: List[WorkorderRow]): Stream[ConnectionIO, WorkorderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala index 81dd79548..0dc8211fc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.product.ProductId import adventureworks.production.scrapreason.ScrapreasonId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -144,4 +146,43 @@ class WorkorderRepoImpl extends WorkorderRepo { returning "workorderid", "productid", "orderqty", "scrappedqty", "startdate"::text, "enddate"::text, "duedate"::text, "scrapreasonid", "modifieddate"::text """.query(using WorkorderRow.read).unique } + override def upsertBatch(unsaved: List[WorkorderRow]): Stream[ConnectionIO, WorkorderRow] = { + Update[WorkorderRow]( + s"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + values (?::int4,?::int4,?::int4,?::int2,?::timestamp,?::timestamp,?::timestamp,?::int2,?::timestamp) + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + returning "workorderid", "productid", "orderqty", "scrappedqty", "startdate"::text, "enddate"::text, "duedate"::text, "scrapreasonid", "modifieddate"::text""" + )(using WorkorderRow.write) + .updateManyWithGeneratedKeys[WorkorderRow]("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate")(unsaved)(using catsStdInstancesForList, WorkorderRow.read) + 
} + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table workorder_TEMP (like production.workorder) on commit drop".update.run + _ <- new FragmentOps(sql"""copy workorder_TEMP("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using WorkorderRow.text) + res <- sql"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + select * from workorder_TEMP + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorder_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala index 98318108e..1f085b978 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala @@ -105,4 +105,23 @@ class WorkorderRepoMock(toRow: Function1[WorkorderRowUnsaved, WorkorderRow], unsaved } } + override def upsertBatch(unsaved: List[WorkorderRow]): Stream[ConnectionIO, WorkorderRow] = { + Stream.emits { + unsaved.map { row => + map += (row.workorderid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.workorderid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRow.scala index 850745711..f57d5b8d9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.scrapreason.ScrapreasonId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -101,4 +102,38 @@ object WorkorderRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[WorkorderRow] = new Write[WorkorderRow]( + puts = List((WorkorderId.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls), + (ScrapreasonId.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.workorderid, x.productid, x.orderqty, x.scrappedqty, x.startdate, x.enddate, x.duedate, x.scrapreasonid, x.modifieddate), + unsafeSet = (rs, i, a) => { + WorkorderId.put.unsafeSetNonNullable(rs, i + 0, a.workorderid) + ProductId.put.unsafeSetNonNullable(rs, i + 1, a.productid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.orderqty) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.scrappedqty) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 5, a.enddate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.duedate) + ScrapreasonId.put.unsafeSetNullable(rs, i + 7, a.scrapreasonid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + WorkorderId.put.unsafeUpdateNonNullable(ps, i + 0, a.workorderid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.orderqty) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.scrappedqty) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 5, a.enddate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.duedate) + ScrapreasonId.put.unsafeUpdateNullable(ps, i + 7, a.scrapreasonid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala index 57e3d06fb..8d736904c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala +++ 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala @@ -30,4 +30,7 @@ trait WorkorderroutingRepo { def update: UpdateBuilder[WorkorderroutingFields, WorkorderroutingRow] def update(row: WorkorderroutingRow): ConnectionIO[Boolean] def upsert(unsaved: WorkorderroutingRow): ConnectionIO[WorkorderroutingRow] + def upsertBatch(unsaved: List[WorkorderroutingRow]): Stream[ConnectionIO, WorkorderroutingRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderroutingRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala index 938e836f6..42abe87fd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.location.LocationId import adventureworks.production.workorder.WorkorderId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -165,4 +167,45 @@ class WorkorderroutingRepoImpl extends WorkorderroutingRepo { returning "workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate"::text, "scheduledenddate"::text, "actualstartdate"::text, "actualenddate"::text, "actualresourcehrs", "plannedcost", "actualcost", "modifieddate"::text """.query(using WorkorderroutingRow.read).unique } + override def upsertBatch(unsaved: List[WorkorderroutingRow]): Stream[ConnectionIO, WorkorderroutingRow] = { + Update[WorkorderroutingRow]( + s"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") + values (?::int4,?::int4,?::int2,?::int2,?::timestamp,?::timestamp,?::timestamp,?::timestamp,?::numeric,?::numeric,?::numeric,?::timestamp) + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate"::text, "scheduledenddate"::text, "actualstartdate"::text, "actualenddate"::text, "actualresourcehrs", "plannedcost", "actualcost", 
"modifieddate"::text""" + )(using WorkorderroutingRow.write) + .updateManyWithGeneratedKeys[WorkorderroutingRow]("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate")(unsaved)(using catsStdInstancesForList, WorkorderroutingRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderroutingRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table workorderrouting_TEMP (like production.workorderrouting) on commit drop".update.run + _ <- new FragmentOps(sql"""copy workorderrouting_TEMP("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using WorkorderroutingRow.text) + res <- sql"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") + select * from workorderrouting_TEMP + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorderrouting_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala index f695135c5..cadb2c7c8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala @@ -105,4 +105,23 @@ class WorkorderroutingRepoMock(toRow: Function1[WorkorderroutingRowUnsaved, Work unsaved } } + override def upsertBatch(unsaved: List[WorkorderroutingRow]): Stream[ConnectionIO, WorkorderroutingRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderroutingRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRow.scala index 10de94006..0e3dc4678 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.workorder.WorkorderId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -124,4 +125,47 @@ object WorkorderroutingRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[WorkorderroutingRow] = new Write[WorkorderroutingRow]( + puts = List((WorkorderId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (LocationId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.workorderid, x.productid, x.operationsequence, x.locationid, x.scheduledstartdate, x.scheduledenddate, x.actualstartdate, x.actualenddate, x.actualresourcehrs, x.plannedcost, x.actualcost, x.modifieddate), + unsafeSet = (rs, i, a) => { + WorkorderId.put.unsafeSetNonNullable(rs, i + 0, a.workorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.productid) + TypoShort.put.unsafeSetNonNullable(rs, i + 2, a.operationsequence) + LocationId.put.unsafeSetNonNullable(rs, i + 3, a.locationid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.scheduledstartdate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.scheduledenddate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 6, a.actualstartdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 7, a.actualenddate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 8, a.actualresourcehrs) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 9, a.plannedcost) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 10, a.actualcost) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 11, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + WorkorderId.put.unsafeUpdateNonNullable(ps, i + 0, a.workorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 2, a.operationsequence) + LocationId.put.unsafeUpdateNonNullable(ps, i + 3, a.locationid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.scheduledstartdate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.scheduledenddate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 6, 
a.actualstartdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 7, a.actualenddate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 8, a.actualresourcehrs) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 9, a.plannedcost) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 10, a.actualcost) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 11, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala index 2f8a4a5a8..9689ef735 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala @@ -27,4 +27,7 @@ trait FlaffRepo { def update: UpdateBuilder[FlaffFields, FlaffRow] def update(row: FlaffRow): ConnectionIO[Boolean] def upsert(unsaved: FlaffRow): ConnectionIO[FlaffRow] + def upsertBatch(unsaved: List[FlaffRow]): Stream[ConnectionIO, FlaffRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, FlaffRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala index 83b99eb18..7e9ab7153 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala @@ -7,12 +7,14 @@ package adventureworks package public package flaff +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -101,4 +103,29 @@ class FlaffRepoImpl extends FlaffRepo { returning "code", "another_code", "some_number", "specifier", "parentspecifier" """.query(using FlaffRow.read).unique } + override def upsertBatch(unsaved: List[FlaffRow]): Stream[ConnectionIO, FlaffRow] = { + Update[FlaffRow]( + s"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + values (?::text,?,?::int4,?::text,?::text) + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + returning "code", "another_code", "some_number", "specifier", "parentspecifier"""" + )(using FlaffRow.write) + .updateManyWithGeneratedKeys[FlaffRow]("code", "another_code", "some_number", "specifier", "parentspecifier")(unsaved)(using catsStdInstancesForList, FlaffRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FlaffRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table flaff_TEMP (like public.flaff) on commit drop".update.run + _ <- new FragmentOps(sql"""copy flaff_TEMP("code", "another_code", "some_number", "specifier", "parentspecifier") from stdin""").copyIn(unsaved, batchSize)(using FlaffRow.text) + res <- sql"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + select * from flaff_TEMP + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + ; + drop table flaff_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala index 3ecac6119..1694635db 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala @@ -89,4 +89,23 @@ class FlaffRepoMock(map: scala.collection.mutable.Map[FlaffId, FlaffRow] = scala unsaved } } + override def upsertBatch(unsaved: List[FlaffRow]): Stream[ConnectionIO, FlaffRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FlaffRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRow.scala index 21172f45e..dd4732091 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRow.scala @@ -10,6 +10,7 @@ package flaff import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -64,4 +65,26 @@ object FlaffRow { sb.append(Text.DELIMETER) Text.option(ShortText.text).unsafeEncode(row.parentspecifier, sb) } + implicit lazy val write: Write[FlaffRow] = new Write[FlaffRow]( + puts = List((ShortText.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (ShortText.put, Nullability.NoNulls), + (ShortText.put, Nullability.Nullable)), + toList = x => List(x.code, x.anotherCode, x.someNumber, x.specifier, x.parentspecifier), + unsafeSet = (rs, i, a) => { + ShortText.put.unsafeSetNonNullable(rs, i + 0, a.code) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.anotherCode) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.someNumber) + ShortText.put.unsafeSetNonNullable(rs, i + 3, a.specifier) + ShortText.put.unsafeSetNullable(rs, i + 4, a.parentspecifier) + }, + unsafeUpdate = (ps, i, a) => { + ShortText.put.unsafeUpdateNonNullable(ps, i + 0, a.code) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.anotherCode) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, 
a.someNumber) + ShortText.put.unsafeUpdateNonNullable(ps, i + 3, a.specifier) + ShortText.put.unsafeUpdateNullable(ps, i + 4, a.parentspecifier) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala index 126c7aaa5..4b07896fb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala @@ -30,4 +30,7 @@ trait IdentityTestRepo { def update: UpdateBuilder[IdentityTestFields, IdentityTestRow] def update(row: IdentityTestRow): ConnectionIO[Boolean] def upsert(unsaved: IdentityTestRow): ConnectionIO[IdentityTestRow] + def upsertBatch(unsaved: List[IdentityTestRow]): Stream[ConnectionIO, IdentityTestRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, IdentityTestRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala index c54ae45a2..7b60b6694 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala @@ -8,6 +8,7 @@ package public package identity_test import adventureworks.customtypes.Defaulted +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -15,6 +16,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -112,4 +114,31 @@ class IdentityTestRepoImpl extends IdentityTestRepo { returning "always_generated", "default_generated", "name" """.query(using IdentityTestRow.read).unique } + override def upsertBatch(unsaved: List[IdentityTestRow]): Stream[ConnectionIO, IdentityTestRow] = { + Update[IdentityTestRow]( + s"""insert into public.identity-test("always_generated", "default_generated", "name") + values (?::int4,?::int4,?) + on conflict ("name") + do update set + "always_generated" = EXCLUDED."always_generated", + "default_generated" = EXCLUDED."default_generated" + returning "always_generated", "default_generated", "name"""" + )(using IdentityTestRow.write) + .updateManyWithGeneratedKeys[IdentityTestRow]("always_generated", "default_generated", "name")(unsaved)(using catsStdInstancesForList, IdentityTestRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, IdentityTestRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table identity-test_TEMP (like public.identity-test) on commit drop".update.run + _ <- new FragmentOps(sql"""copy identity-test_TEMP("always_generated", "default_generated", "name") from stdin""").copyIn(unsaved, batchSize)(using IdentityTestRow.text) + res <- sql"""insert into public.identity-test("always_generated", "default_generated", "name") + select * from identity-test_TEMP + on conflict ("name") + do update set + "always_generated" = EXCLUDED."always_generated", + "default_generated" = EXCLUDED."default_generated" + ; + drop table identity-test_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala index 3635a9fc6..ece36dd42 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala @@ -105,4 +105,23 @@ class IdentityTestRepoMock(toRow: Function1[IdentityTestRowUnsaved, IdentityTest unsaved } } + override def upsertBatch(unsaved: List[IdentityTestRow]): Stream[ConnectionIO, IdentityTestRow] = { + Stream.emits { + unsaved.map { row => + map += (row.name -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, IdentityTestRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.name -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRow.scala index f3b51cef7..1837195e5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRow.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -52,4 +53,20 @@ object IdentityTestRow { sb.append(Text.DELIMETER) IdentityTestId.text.unsafeEncode(row.name, sb) } + implicit lazy val write: Write[IdentityTestRow] = new Write[IdentityTestRow]( + puts = List((Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (IdentityTestId.put, Nullability.NoNulls)), + toList = x => List(x.alwaysGenerated, x.defaultGenerated, x.name), + unsafeSet = (rs, i, a) => { + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 0, a.alwaysGenerated) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.defaultGenerated) + IdentityTestId.put.unsafeSetNonNullable(rs, i + 2, a.name) + }, + unsafeUpdate = (ps, i, a) => { + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.alwaysGenerated) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.defaultGenerated) + IdentityTestId.put.unsafeUpdateNonNullable(ps, i + 2, a.name) 
+ } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtest/PgtestRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtest/PgtestRow.scala index 27c3e255d..1a2382de9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtest/PgtestRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtest/PgtestRow.scala @@ -34,6 +34,7 @@ import adventureworks.customtypes.TypoXml import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.DecodingFailure @@ -559,4 +560,221 @@ object PgtestRow { sb.append(Text.DELIMETER) Text.iterableInstance[Array, TypoXml](TypoXml.text, implicitly).unsafeEncode(row.xmles, sb) } + implicit lazy val write: Write[PgtestRow] = new Write[PgtestRow]( + puts = List((Meta.BooleanMeta.put, Nullability.NoNulls), + (TypoBox.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoBytea.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoCircle.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.NoNulls), + (Meta.FloatMeta.put, Nullability.NoNulls), + (Meta.DoubleMeta.put, Nullability.NoNulls), + (TypoHStore.put, Nullability.NoNulls), + (TypoInet.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoInt2Vector.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.LongMeta.put, Nullability.NoNulls), + (TypoInterval.put, Nullability.NoNulls), + (TypoJson.put, Nullability.NoNulls), + (TypoJsonb.put, Nullability.NoNulls), + (TypoLine.put, Nullability.NoNulls), + (TypoLineSegment.put, Nullability.NoNulls), + (TypoMoney.put, Nullability.NoNulls), + (Mydomain.put, Nullability.NoNulls), + (Myenum.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoPath.put, Nullability.NoNulls), + (TypoPoint.put, Nullability.NoNulls), + (TypoPolygon.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoLocalTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoInstant.put, Nullability.NoNulls), + (TypoOffsetTime.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoVector.put, Nullability.NoNulls), + (TypoXml.put, Nullability.NoNulls), + (TypoBox.arrayPut, Nullability.NoNulls), + (adventureworks.StringArrayMeta.put, Nullability.NoNulls), + (adventureworks.StringArrayMeta.put, Nullability.NoNulls), + (TypoCircle.arrayPut, Nullability.NoNulls), + (TypoLocalDate.arrayPut, Nullability.NoNulls), + (adventureworks.FloatArrayMeta.put, Nullability.NoNulls), + (adventureworks.DoubleArrayMeta.put, Nullability.NoNulls), + (TypoInet.arrayPut, Nullability.NoNulls), + (TypoShort.arrayPut, Nullability.NoNulls), + (TypoInt2Vector.arrayPut, Nullability.NoNulls), + (adventureworks.IntegerArrayMeta.put, Nullability.NoNulls), + (adventureworks.LongArrayMeta.put, Nullability.NoNulls), + (TypoInterval.arrayPut, Nullability.NoNulls), + (TypoJson.arrayPut, Nullability.NoNulls), + (TypoJsonb.arrayPut, Nullability.NoNulls), + (TypoLine.arrayPut, Nullability.NoNulls), + (TypoLineSegment.arrayPut, Nullability.NoNulls), + (TypoMoney.arrayPut, Nullability.NoNulls), + (Mydomain.arrayPut, Nullability.NoNulls), + (Myenum.arrayPut, Nullability.NoNulls), + (adventureworks.StringArrayMeta.put, 
Nullability.NoNulls), + (adventureworks.BigDecimalMeta.put, Nullability.NoNulls), + (TypoPath.arrayPut, Nullability.NoNulls), + (TypoPoint.arrayPut, Nullability.NoNulls), + (TypoPolygon.arrayPut, Nullability.NoNulls), + (adventureworks.StringArrayMeta.put, Nullability.NoNulls), + (TypoLocalTime.arrayPut, Nullability.NoNulls), + (TypoLocalDateTime.arrayPut, Nullability.NoNulls), + (TypoInstant.arrayPut, Nullability.NoNulls), + (TypoOffsetTime.arrayPut, Nullability.NoNulls), + (TypoUUID.arrayPut, Nullability.NoNulls), + (adventureworks.StringArrayMeta.put, Nullability.NoNulls), + (TypoXml.arrayPut, Nullability.NoNulls)), + toList = x => List(x.bool, x.box, x.bpchar, x.bytea, x.char, x.circle, x.date, x.float4, x.float8, x.hstore, x.inet, x.int2, x.int2vector, x.int4, x.int8, x.interval, x.json, x.jsonb, x.line, x.lseg, x.money, x.mydomain, x.myenum, x.name, x.numeric, x.path, x.point, x.polygon, x.text, x.time, x.timestamp, x.timestampz, x.timez, x.uuid, x.varchar, x.vector, x.xml, x.boxes, x.bpchares, x.chares, x.circlees, x.datees, x.float4es, x.float8es, x.inetes, x.int2es, x.int2vectores, x.int4es, x.int8es, x.intervales, x.jsones, x.jsonbes, x.linees, x.lseges, x.moneyes, x.mydomaines, x.myenumes, x.namees, x.numerices, x.pathes, x.pointes, x.polygones, x.textes, x.timees, x.timestampes, x.timestampzes, x.timezes, x.uuides, x.varchares, x.xmles), + unsafeSet = (rs, i, a) => { + Meta.BooleanMeta.put.unsafeSetNonNullable(rs, i + 0, a.bool) + TypoBox.put.unsafeSetNonNullable(rs, i + 1, a.box) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.bpchar) + TypoBytea.put.unsafeSetNonNullable(rs, i + 3, a.bytea) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 4, a.char) + TypoCircle.put.unsafeSetNonNullable(rs, i + 5, a.circle) + TypoLocalDate.put.unsafeSetNonNullable(rs, i + 6, a.date) + Meta.FloatMeta.put.unsafeSetNonNullable(rs, i + 7, a.float4) + Meta.DoubleMeta.put.unsafeSetNonNullable(rs, i + 8, a.float8) + TypoHStore.put.unsafeSetNonNullable(rs, i + 9, a.hstore) + TypoInet.put.unsafeSetNonNullable(rs, i + 10, a.inet) + TypoShort.put.unsafeSetNonNullable(rs, i + 11, a.int2) + TypoInt2Vector.put.unsafeSetNonNullable(rs, i + 12, a.int2vector) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 13, a.int4) + Meta.LongMeta.put.unsafeSetNonNullable(rs, i + 14, a.int8) + TypoInterval.put.unsafeSetNonNullable(rs, i + 15, a.interval) + TypoJson.put.unsafeSetNonNullable(rs, i + 16, a.json) + TypoJsonb.put.unsafeSetNonNullable(rs, i + 17, a.jsonb) + TypoLine.put.unsafeSetNonNullable(rs, i + 18, a.line) + TypoLineSegment.put.unsafeSetNonNullable(rs, i + 19, a.lseg) + TypoMoney.put.unsafeSetNonNullable(rs, i + 20, a.money) + Mydomain.put.unsafeSetNonNullable(rs, i + 21, a.mydomain) + Myenum.put.unsafeSetNonNullable(rs, i + 22, a.myenum) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 23, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 24, a.numeric) + TypoPath.put.unsafeSetNonNullable(rs, i + 25, a.path) + TypoPoint.put.unsafeSetNonNullable(rs, i + 26, a.point) + TypoPolygon.put.unsafeSetNonNullable(rs, i + 27, a.polygon) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 28, a.text) + TypoLocalTime.put.unsafeSetNonNullable(rs, i + 29, a.time) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 30, a.timestamp) + TypoInstant.put.unsafeSetNonNullable(rs, i + 31, a.timestampz) + TypoOffsetTime.put.unsafeSetNonNullable(rs, i + 32, a.timez) + TypoUUID.put.unsafeSetNonNullable(rs, i + 33, a.uuid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 34, a.varchar) + 
TypoVector.put.unsafeSetNonNullable(rs, i + 35, a.vector) + TypoXml.put.unsafeSetNonNullable(rs, i + 36, a.xml) + TypoBox.arrayPut.unsafeSetNonNullable(rs, i + 37, a.boxes) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 38, a.bpchares) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 39, a.chares) + TypoCircle.arrayPut.unsafeSetNonNullable(rs, i + 40, a.circlees) + TypoLocalDate.arrayPut.unsafeSetNonNullable(rs, i + 41, a.datees) + adventureworks.FloatArrayMeta.put.unsafeSetNonNullable(rs, i + 42, a.float4es) + adventureworks.DoubleArrayMeta.put.unsafeSetNonNullable(rs, i + 43, a.float8es) + TypoInet.arrayPut.unsafeSetNonNullable(rs, i + 44, a.inetes) + TypoShort.arrayPut.unsafeSetNonNullable(rs, i + 45, a.int2es) + TypoInt2Vector.arrayPut.unsafeSetNonNullable(rs, i + 46, a.int2vectores) + adventureworks.IntegerArrayMeta.put.unsafeSetNonNullable(rs, i + 47, a.int4es) + adventureworks.LongArrayMeta.put.unsafeSetNonNullable(rs, i + 48, a.int8es) + TypoInterval.arrayPut.unsafeSetNonNullable(rs, i + 49, a.intervales) + TypoJson.arrayPut.unsafeSetNonNullable(rs, i + 50, a.jsones) + TypoJsonb.arrayPut.unsafeSetNonNullable(rs, i + 51, a.jsonbes) + TypoLine.arrayPut.unsafeSetNonNullable(rs, i + 52, a.linees) + TypoLineSegment.arrayPut.unsafeSetNonNullable(rs, i + 53, a.lseges) + TypoMoney.arrayPut.unsafeSetNonNullable(rs, i + 54, a.moneyes) + Mydomain.arrayPut.unsafeSetNonNullable(rs, i + 55, a.mydomaines) + Myenum.arrayPut.unsafeSetNonNullable(rs, i + 56, a.myenumes) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 57, a.namees) + adventureworks.BigDecimalMeta.put.unsafeSetNonNullable(rs, i + 58, a.numerices) + TypoPath.arrayPut.unsafeSetNonNullable(rs, i + 59, a.pathes) + TypoPoint.arrayPut.unsafeSetNonNullable(rs, i + 60, a.pointes) + TypoPolygon.arrayPut.unsafeSetNonNullable(rs, i + 61, a.polygones) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 62, a.textes) + TypoLocalTime.arrayPut.unsafeSetNonNullable(rs, i + 63, a.timees) + TypoLocalDateTime.arrayPut.unsafeSetNonNullable(rs, i + 64, a.timestampes) + TypoInstant.arrayPut.unsafeSetNonNullable(rs, i + 65, a.timestampzes) + TypoOffsetTime.arrayPut.unsafeSetNonNullable(rs, i + 66, a.timezes) + TypoUUID.arrayPut.unsafeSetNonNullable(rs, i + 67, a.uuides) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 68, a.varchares) + TypoXml.arrayPut.unsafeSetNonNullable(rs, i + 69, a.xmles) + }, + unsafeUpdate = (ps, i, a) => { + Meta.BooleanMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.bool) + TypoBox.put.unsafeUpdateNonNullable(ps, i + 1, a.box) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.bpchar) + TypoBytea.put.unsafeUpdateNonNullable(ps, i + 3, a.bytea) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.char) + TypoCircle.put.unsafeUpdateNonNullable(ps, i + 5, a.circle) + TypoLocalDate.put.unsafeUpdateNonNullable(ps, i + 6, a.date) + Meta.FloatMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.float4) + Meta.DoubleMeta.put.unsafeUpdateNonNullable(ps, i + 8, a.float8) + TypoHStore.put.unsafeUpdateNonNullable(ps, i + 9, a.hstore) + TypoInet.put.unsafeUpdateNonNullable(ps, i + 10, a.inet) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 11, a.int2) + TypoInt2Vector.put.unsafeUpdateNonNullable(ps, i + 12, a.int2vector) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 13, a.int4) + Meta.LongMeta.put.unsafeUpdateNonNullable(ps, i + 14, a.int8) + TypoInterval.put.unsafeUpdateNonNullable(ps, i + 15, a.interval) + TypoJson.put.unsafeUpdateNonNullable(ps, i 
+ 16, a.json) + TypoJsonb.put.unsafeUpdateNonNullable(ps, i + 17, a.jsonb) + TypoLine.put.unsafeUpdateNonNullable(ps, i + 18, a.line) + TypoLineSegment.put.unsafeUpdateNonNullable(ps, i + 19, a.lseg) + TypoMoney.put.unsafeUpdateNonNullable(ps, i + 20, a.money) + Mydomain.put.unsafeUpdateNonNullable(ps, i + 21, a.mydomain) + Myenum.put.unsafeUpdateNonNullable(ps, i + 22, a.myenum) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 23, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 24, a.numeric) + TypoPath.put.unsafeUpdateNonNullable(ps, i + 25, a.path) + TypoPoint.put.unsafeUpdateNonNullable(ps, i + 26, a.point) + TypoPolygon.put.unsafeUpdateNonNullable(ps, i + 27, a.polygon) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 28, a.text) + TypoLocalTime.put.unsafeUpdateNonNullable(ps, i + 29, a.time) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 30, a.timestamp) + TypoInstant.put.unsafeUpdateNonNullable(ps, i + 31, a.timestampz) + TypoOffsetTime.put.unsafeUpdateNonNullable(ps, i + 32, a.timez) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 33, a.uuid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 34, a.varchar) + TypoVector.put.unsafeUpdateNonNullable(ps, i + 35, a.vector) + TypoXml.put.unsafeUpdateNonNullable(ps, i + 36, a.xml) + TypoBox.arrayPut.unsafeUpdateNonNullable(ps, i + 37, a.boxes) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 38, a.bpchares) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 39, a.chares) + TypoCircle.arrayPut.unsafeUpdateNonNullable(ps, i + 40, a.circlees) + TypoLocalDate.arrayPut.unsafeUpdateNonNullable(ps, i + 41, a.datees) + adventureworks.FloatArrayMeta.put.unsafeUpdateNonNullable(ps, i + 42, a.float4es) + adventureworks.DoubleArrayMeta.put.unsafeUpdateNonNullable(ps, i + 43, a.float8es) + TypoInet.arrayPut.unsafeUpdateNonNullable(ps, i + 44, a.inetes) + TypoShort.arrayPut.unsafeUpdateNonNullable(ps, i + 45, a.int2es) + TypoInt2Vector.arrayPut.unsafeUpdateNonNullable(ps, i + 46, a.int2vectores) + adventureworks.IntegerArrayMeta.put.unsafeUpdateNonNullable(ps, i + 47, a.int4es) + adventureworks.LongArrayMeta.put.unsafeUpdateNonNullable(ps, i + 48, a.int8es) + TypoInterval.arrayPut.unsafeUpdateNonNullable(ps, i + 49, a.intervales) + TypoJson.arrayPut.unsafeUpdateNonNullable(ps, i + 50, a.jsones) + TypoJsonb.arrayPut.unsafeUpdateNonNullable(ps, i + 51, a.jsonbes) + TypoLine.arrayPut.unsafeUpdateNonNullable(ps, i + 52, a.linees) + TypoLineSegment.arrayPut.unsafeUpdateNonNullable(ps, i + 53, a.lseges) + TypoMoney.arrayPut.unsafeUpdateNonNullable(ps, i + 54, a.moneyes) + Mydomain.arrayPut.unsafeUpdateNonNullable(ps, i + 55, a.mydomaines) + Myenum.arrayPut.unsafeUpdateNonNullable(ps, i + 56, a.myenumes) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 57, a.namees) + adventureworks.BigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 58, a.numerices) + TypoPath.arrayPut.unsafeUpdateNonNullable(ps, i + 59, a.pathes) + TypoPoint.arrayPut.unsafeUpdateNonNullable(ps, i + 60, a.pointes) + TypoPolygon.arrayPut.unsafeUpdateNonNullable(ps, i + 61, a.polygones) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 62, a.textes) + TypoLocalTime.arrayPut.unsafeUpdateNonNullable(ps, i + 63, a.timees) + TypoLocalDateTime.arrayPut.unsafeUpdateNonNullable(ps, i + 64, a.timestampes) + TypoInstant.arrayPut.unsafeUpdateNonNullable(ps, i + 65, a.timestampzes) + TypoOffsetTime.arrayPut.unsafeUpdateNonNullable(ps, i + 66, a.timezes) + 
TypoUUID.arrayPut.unsafeUpdateNonNullable(ps, i + 67, a.uuides) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 68, a.varchares) + TypoXml.arrayPut.unsafeUpdateNonNullable(ps, i + 69, a.xmles) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtestnull/PgtestnullRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtestnull/PgtestnullRow.scala index 890d126b3..0ca5e5d0a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtestnull/PgtestnullRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtestnull/PgtestnullRow.scala @@ -34,6 +34,7 @@ import adventureworks.customtypes.TypoXml import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.DecodingFailure @@ -559,4 +560,221 @@ object PgtestnullRow { sb.append(Text.DELIMETER) Text.option(Text.iterableInstance[Array, TypoXml](TypoXml.text, implicitly)).unsafeEncode(row.xmles, sb) } + implicit lazy val write: Write[PgtestnullRow] = new Write[PgtestnullRow]( + puts = List((Meta.BooleanMeta.put, Nullability.Nullable), + (TypoBox.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoBytea.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoCircle.put, Nullability.Nullable), + (TypoLocalDate.put, Nullability.Nullable), + (Meta.FloatMeta.put, Nullability.Nullable), + (Meta.DoubleMeta.put, Nullability.Nullable), + (TypoHStore.put, Nullability.Nullable), + (TypoInet.put, Nullability.Nullable), + (TypoShort.put, Nullability.Nullable), + (TypoInt2Vector.put, Nullability.Nullable), + (Meta.IntMeta.put, Nullability.Nullable), + (Meta.LongMeta.put, Nullability.Nullable), + (TypoInterval.put, Nullability.Nullable), + (TypoJson.put, Nullability.Nullable), + (TypoJsonb.put, Nullability.Nullable), + (TypoLine.put, Nullability.Nullable), + (TypoLineSegment.put, Nullability.Nullable), + (TypoMoney.put, Nullability.Nullable), + (Mydomain.put, Nullability.Nullable), + (Myenum.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (TypoPath.put, Nullability.Nullable), + (TypoPoint.put, Nullability.Nullable), + (TypoPolygon.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoLocalTime.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoInstant.put, Nullability.Nullable), + (TypoOffsetTime.put, Nullability.Nullable), + (TypoUUID.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoVector.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoBox.arrayPut, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (TypoCircle.arrayPut, Nullability.Nullable), + (TypoLocalDate.arrayPut, Nullability.Nullable), + (adventureworks.FloatArrayMeta.put, Nullability.Nullable), + (adventureworks.DoubleArrayMeta.put, Nullability.Nullable), + (TypoInet.arrayPut, Nullability.Nullable), + (TypoShort.arrayPut, Nullability.Nullable), + (TypoInt2Vector.arrayPut, Nullability.Nullable), + (adventureworks.IntegerArrayMeta.put, Nullability.Nullable), + (adventureworks.LongArrayMeta.put, Nullability.Nullable), + (TypoInterval.arrayPut, Nullability.Nullable), + (TypoJson.arrayPut, 
Nullability.Nullable), + (TypoJsonb.arrayPut, Nullability.Nullable), + (TypoLine.arrayPut, Nullability.Nullable), + (TypoLineSegment.arrayPut, Nullability.Nullable), + (TypoMoney.arrayPut, Nullability.Nullable), + (Mydomain.arrayPut, Nullability.Nullable), + (Myenum.arrayPut, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (adventureworks.BigDecimalMeta.put, Nullability.Nullable), + (TypoPath.arrayPut, Nullability.Nullable), + (TypoPoint.arrayPut, Nullability.Nullable), + (TypoPolygon.arrayPut, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (TypoLocalTime.arrayPut, Nullability.Nullable), + (TypoLocalDateTime.arrayPut, Nullability.Nullable), + (TypoInstant.arrayPut, Nullability.Nullable), + (TypoOffsetTime.arrayPut, Nullability.Nullable), + (TypoUUID.arrayPut, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (TypoXml.arrayPut, Nullability.Nullable)), + toList = x => List(x.bool, x.box, x.bpchar, x.bytea, x.char, x.circle, x.date, x.float4, x.float8, x.hstore, x.inet, x.int2, x.int2vector, x.int4, x.int8, x.interval, x.json, x.jsonb, x.line, x.lseg, x.money, x.mydomain, x.myenum, x.name, x.numeric, x.path, x.point, x.polygon, x.text, x.time, x.timestamp, x.timestampz, x.timez, x.uuid, x.varchar, x.vector, x.xml, x.boxes, x.bpchares, x.chares, x.circlees, x.datees, x.float4es, x.float8es, x.inetes, x.int2es, x.int2vectores, x.int4es, x.int8es, x.intervales, x.jsones, x.jsonbes, x.linees, x.lseges, x.moneyes, x.mydomaines, x.myenumes, x.namees, x.numerices, x.pathes, x.pointes, x.polygones, x.textes, x.timees, x.timestampes, x.timestampzes, x.timezes, x.uuides, x.varchares, x.xmles), + unsafeSet = (rs, i, a) => { + Meta.BooleanMeta.put.unsafeSetNullable(rs, i + 0, a.bool) + TypoBox.put.unsafeSetNullable(rs, i + 1, a.box) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.bpchar) + TypoBytea.put.unsafeSetNullable(rs, i + 3, a.bytea) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.char) + TypoCircle.put.unsafeSetNullable(rs, i + 5, a.circle) + TypoLocalDate.put.unsafeSetNullable(rs, i + 6, a.date) + Meta.FloatMeta.put.unsafeSetNullable(rs, i + 7, a.float4) + Meta.DoubleMeta.put.unsafeSetNullable(rs, i + 8, a.float8) + TypoHStore.put.unsafeSetNullable(rs, i + 9, a.hstore) + TypoInet.put.unsafeSetNullable(rs, i + 10, a.inet) + TypoShort.put.unsafeSetNullable(rs, i + 11, a.int2) + TypoInt2Vector.put.unsafeSetNullable(rs, i + 12, a.int2vector) + Meta.IntMeta.put.unsafeSetNullable(rs, i + 13, a.int4) + Meta.LongMeta.put.unsafeSetNullable(rs, i + 14, a.int8) + TypoInterval.put.unsafeSetNullable(rs, i + 15, a.interval) + TypoJson.put.unsafeSetNullable(rs, i + 16, a.json) + TypoJsonb.put.unsafeSetNullable(rs, i + 17, a.jsonb) + TypoLine.put.unsafeSetNullable(rs, i + 18, a.line) + TypoLineSegment.put.unsafeSetNullable(rs, i + 19, a.lseg) + TypoMoney.put.unsafeSetNullable(rs, i + 20, a.money) + Mydomain.put.unsafeSetNullable(rs, i + 21, a.mydomain) + Myenum.put.unsafeSetNullable(rs, i + 22, a.myenum) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 23, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 24, a.numeric) + TypoPath.put.unsafeSetNullable(rs, i + 25, a.path) + TypoPoint.put.unsafeSetNullable(rs, i + 26, a.point) + TypoPolygon.put.unsafeSetNullable(rs, i + 27, a.polygon) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 28, a.text) + TypoLocalTime.put.unsafeSetNullable(rs, i + 29, a.time) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 30, a.timestamp) + 
TypoInstant.put.unsafeSetNullable(rs, i + 31, a.timestampz) + TypoOffsetTime.put.unsafeSetNullable(rs, i + 32, a.timez) + TypoUUID.put.unsafeSetNullable(rs, i + 33, a.uuid) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 34, a.varchar) + TypoVector.put.unsafeSetNullable(rs, i + 35, a.vector) + TypoXml.put.unsafeSetNullable(rs, i + 36, a.xml) + TypoBox.arrayPut.unsafeSetNullable(rs, i + 37, a.boxes) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 38, a.bpchares) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 39, a.chares) + TypoCircle.arrayPut.unsafeSetNullable(rs, i + 40, a.circlees) + TypoLocalDate.arrayPut.unsafeSetNullable(rs, i + 41, a.datees) + adventureworks.FloatArrayMeta.put.unsafeSetNullable(rs, i + 42, a.float4es) + adventureworks.DoubleArrayMeta.put.unsafeSetNullable(rs, i + 43, a.float8es) + TypoInet.arrayPut.unsafeSetNullable(rs, i + 44, a.inetes) + TypoShort.arrayPut.unsafeSetNullable(rs, i + 45, a.int2es) + TypoInt2Vector.arrayPut.unsafeSetNullable(rs, i + 46, a.int2vectores) + adventureworks.IntegerArrayMeta.put.unsafeSetNullable(rs, i + 47, a.int4es) + adventureworks.LongArrayMeta.put.unsafeSetNullable(rs, i + 48, a.int8es) + TypoInterval.arrayPut.unsafeSetNullable(rs, i + 49, a.intervales) + TypoJson.arrayPut.unsafeSetNullable(rs, i + 50, a.jsones) + TypoJsonb.arrayPut.unsafeSetNullable(rs, i + 51, a.jsonbes) + TypoLine.arrayPut.unsafeSetNullable(rs, i + 52, a.linees) + TypoLineSegment.arrayPut.unsafeSetNullable(rs, i + 53, a.lseges) + TypoMoney.arrayPut.unsafeSetNullable(rs, i + 54, a.moneyes) + Mydomain.arrayPut.unsafeSetNullable(rs, i + 55, a.mydomaines) + Myenum.arrayPut.unsafeSetNullable(rs, i + 56, a.myenumes) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 57, a.namees) + adventureworks.BigDecimalMeta.put.unsafeSetNullable(rs, i + 58, a.numerices) + TypoPath.arrayPut.unsafeSetNullable(rs, i + 59, a.pathes) + TypoPoint.arrayPut.unsafeSetNullable(rs, i + 60, a.pointes) + TypoPolygon.arrayPut.unsafeSetNullable(rs, i + 61, a.polygones) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 62, a.textes) + TypoLocalTime.arrayPut.unsafeSetNullable(rs, i + 63, a.timees) + TypoLocalDateTime.arrayPut.unsafeSetNullable(rs, i + 64, a.timestampes) + TypoInstant.arrayPut.unsafeSetNullable(rs, i + 65, a.timestampzes) + TypoOffsetTime.arrayPut.unsafeSetNullable(rs, i + 66, a.timezes) + TypoUUID.arrayPut.unsafeSetNullable(rs, i + 67, a.uuides) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 68, a.varchares) + TypoXml.arrayPut.unsafeSetNullable(rs, i + 69, a.xmles) + }, + unsafeUpdate = (ps, i, a) => { + Meta.BooleanMeta.put.unsafeUpdateNullable(ps, i + 0, a.bool) + TypoBox.put.unsafeUpdateNullable(ps, i + 1, a.box) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.bpchar) + TypoBytea.put.unsafeUpdateNullable(ps, i + 3, a.bytea) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.char) + TypoCircle.put.unsafeUpdateNullable(ps, i + 5, a.circle) + TypoLocalDate.put.unsafeUpdateNullable(ps, i + 6, a.date) + Meta.FloatMeta.put.unsafeUpdateNullable(ps, i + 7, a.float4) + Meta.DoubleMeta.put.unsafeUpdateNullable(ps, i + 8, a.float8) + TypoHStore.put.unsafeUpdateNullable(ps, i + 9, a.hstore) + TypoInet.put.unsafeUpdateNullable(ps, i + 10, a.inet) + TypoShort.put.unsafeUpdateNullable(ps, i + 11, a.int2) + TypoInt2Vector.put.unsafeUpdateNullable(ps, i + 12, a.int2vector) + Meta.IntMeta.put.unsafeUpdateNullable(ps, i + 13, a.int4) + Meta.LongMeta.put.unsafeUpdateNullable(ps, i + 14, a.int8) + 
TypoInterval.put.unsafeUpdateNullable(ps, i + 15, a.interval) + TypoJson.put.unsafeUpdateNullable(ps, i + 16, a.json) + TypoJsonb.put.unsafeUpdateNullable(ps, i + 17, a.jsonb) + TypoLine.put.unsafeUpdateNullable(ps, i + 18, a.line) + TypoLineSegment.put.unsafeUpdateNullable(ps, i + 19, a.lseg) + TypoMoney.put.unsafeUpdateNullable(ps, i + 20, a.money) + Mydomain.put.unsafeUpdateNullable(ps, i + 21, a.mydomain) + Myenum.put.unsafeUpdateNullable(ps, i + 22, a.myenum) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 23, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 24, a.numeric) + TypoPath.put.unsafeUpdateNullable(ps, i + 25, a.path) + TypoPoint.put.unsafeUpdateNullable(ps, i + 26, a.point) + TypoPolygon.put.unsafeUpdateNullable(ps, i + 27, a.polygon) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 28, a.text) + TypoLocalTime.put.unsafeUpdateNullable(ps, i + 29, a.time) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 30, a.timestamp) + TypoInstant.put.unsafeUpdateNullable(ps, i + 31, a.timestampz) + TypoOffsetTime.put.unsafeUpdateNullable(ps, i + 32, a.timez) + TypoUUID.put.unsafeUpdateNullable(ps, i + 33, a.uuid) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 34, a.varchar) + TypoVector.put.unsafeUpdateNullable(ps, i + 35, a.vector) + TypoXml.put.unsafeUpdateNullable(ps, i + 36, a.xml) + TypoBox.arrayPut.unsafeUpdateNullable(ps, i + 37, a.boxes) + adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 38, a.bpchares) + adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 39, a.chares) + TypoCircle.arrayPut.unsafeUpdateNullable(ps, i + 40, a.circlees) + TypoLocalDate.arrayPut.unsafeUpdateNullable(ps, i + 41, a.datees) + adventureworks.FloatArrayMeta.put.unsafeUpdateNullable(ps, i + 42, a.float4es) + adventureworks.DoubleArrayMeta.put.unsafeUpdateNullable(ps, i + 43, a.float8es) + TypoInet.arrayPut.unsafeUpdateNullable(ps, i + 44, a.inetes) + TypoShort.arrayPut.unsafeUpdateNullable(ps, i + 45, a.int2es) + TypoInt2Vector.arrayPut.unsafeUpdateNullable(ps, i + 46, a.int2vectores) + adventureworks.IntegerArrayMeta.put.unsafeUpdateNullable(ps, i + 47, a.int4es) + adventureworks.LongArrayMeta.put.unsafeUpdateNullable(ps, i + 48, a.int8es) + TypoInterval.arrayPut.unsafeUpdateNullable(ps, i + 49, a.intervales) + TypoJson.arrayPut.unsafeUpdateNullable(ps, i + 50, a.jsones) + TypoJsonb.arrayPut.unsafeUpdateNullable(ps, i + 51, a.jsonbes) + TypoLine.arrayPut.unsafeUpdateNullable(ps, i + 52, a.linees) + TypoLineSegment.arrayPut.unsafeUpdateNullable(ps, i + 53, a.lseges) + TypoMoney.arrayPut.unsafeUpdateNullable(ps, i + 54, a.moneyes) + Mydomain.arrayPut.unsafeUpdateNullable(ps, i + 55, a.mydomaines) + Myenum.arrayPut.unsafeUpdateNullable(ps, i + 56, a.myenumes) + adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 57, a.namees) + adventureworks.BigDecimalMeta.put.unsafeUpdateNullable(ps, i + 58, a.numerices) + TypoPath.arrayPut.unsafeUpdateNullable(ps, i + 59, a.pathes) + TypoPoint.arrayPut.unsafeUpdateNullable(ps, i + 60, a.pointes) + TypoPolygon.arrayPut.unsafeUpdateNullable(ps, i + 61, a.polygones) + adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 62, a.textes) + TypoLocalTime.arrayPut.unsafeUpdateNullable(ps, i + 63, a.timees) + TypoLocalDateTime.arrayPut.unsafeUpdateNullable(ps, i + 64, a.timestampes) + TypoInstant.arrayPut.unsafeUpdateNullable(ps, i + 65, a.timestampzes) + TypoOffsetTime.arrayPut.unsafeUpdateNullable(ps, i + 66, a.timezes) + TypoUUID.arrayPut.unsafeUpdateNullable(ps, i + 67, a.uuides) + 
adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 68, a.varchares) + TypoXml.arrayPut.unsafeUpdateNullable(ps, i + 69, a.xmles) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala index d7f095e13..b0b13c26f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala @@ -32,4 +32,7 @@ trait UsersRepo { def update: UpdateBuilder[UsersFields, UsersRow] def update(row: UsersRow): ConnectionIO[Boolean] def upsert(unsaved: UsersRow): ConnectionIO[UsersRow] + def upsertBatch(unsaved: List[UsersRow]): Stream[ConnectionIO, UsersRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, UsersRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala index 7df717b98..5f71691e7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala @@ -10,6 +10,7 @@ package users import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoInstant import adventureworks.customtypes.TypoUnknownCitext +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -137,4 +139,39 @@ class UsersRepoImpl extends UsersRepo { returning "user_id", "name", "last_name", "email"::text, "password", "created_at"::text, "verified_on"::text """.query(using UsersRow.read).unique } + override def upsertBatch(unsaved: List[UsersRow]): Stream[ConnectionIO, UsersRow] = { + Update[UsersRow]( + s"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + values (?::uuid,?,?,?::citext,?,?::timestamptz,?::timestamptz) + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + returning "user_id", "name", "last_name", "email"::text, "password", "created_at"::text, "verified_on"::text""" + )(using UsersRow.write) + .updateManyWithGeneratedKeys[UsersRow]("user_id", "name", "last_name", "email", "password", "created_at", "verified_on")(unsaved)(using catsStdInstancesForList, UsersRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, UsersRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table users_TEMP (like public.users) on commit drop".update.run + _ <- new FragmentOps(sql"""copy users_TEMP("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") from stdin""").copyIn(unsaved, batchSize)(using UsersRow.text) + res <- sql"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + select * from users_TEMP + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + ; + drop table users_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala index 17c513c08..a75ac8107 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala @@ -109,4 +109,23 @@ class UsersRepoMock(toRow: Function1[UsersRowUnsaved, UsersRow], unsaved } } + override def upsertBatch(unsaved: List[UsersRow]): Stream[ConnectionIO, UsersRow] = { + Stream.emits { + unsaved.map { row => + map += (row.userId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, UsersRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.userId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRow.scala index 6b7c7f5e2..cb941331c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUnknownCitext import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -73,4 +74,32 @@ object UsersRow { sb.append(Text.DELIMETER) Text.option(TypoInstant.text).unsafeEncode(row.verifiedOn, sb) } + implicit lazy val write: Write[UsersRow] = new Write[UsersRow]( + puts = List((UsersId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoUnknownCitext.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoInstant.put, Nullability.NoNulls), + (TypoInstant.put, Nullability.Nullable)), + toList = x => List(x.userId, x.name, x.lastName, x.email, x.password, x.createdAt, x.verifiedOn), + unsafeSet = (rs, i, a) => { + UsersId.put.unsafeSetNonNullable(rs, i + 0, a.userId) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.name) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.lastName) + TypoUnknownCitext.put.unsafeSetNonNullable(rs, i + 3, a.email) + 
Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 4, a.password) + TypoInstant.put.unsafeSetNonNullable(rs, i + 5, a.createdAt) + TypoInstant.put.unsafeSetNullable(rs, i + 6, a.verifiedOn) + }, + unsafeUpdate = (ps, i, a) => { + UsersId.put.unsafeUpdateNonNullable(ps, i + 0, a.userId) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.lastName) + TypoUnknownCitext.put.unsafeUpdateNonNullable(ps, i + 3, a.email) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.password) + TypoInstant.put.unsafeUpdateNonNullable(ps, i + 5, a.createdAt) + TypoInstant.put.unsafeUpdateNullable(ps, i + 6, a.verifiedOn) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala index 9fe44954c..052eaa6a2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala @@ -30,4 +30,7 @@ trait ProductvendorRepo { def update: UpdateBuilder[ProductvendorFields, ProductvendorRow] def update(row: ProductvendorRow): ConnectionIO[Boolean] def upsert(unsaved: ProductvendorRow): ConnectionIO[ProductvendorRow] + def upsertBatch(unsaved: List[ProductvendorRow]): Stream[ConnectionIO, ProductvendorRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductvendorRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala index b72c44197..32b589893 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.production.product.ProductId import adventureworks.production.unitmeasure.UnitmeasureId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -161,4 +163,45 @@ class ProductvendorRepoImpl extends ProductvendorRepo { returning "productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate"::text, "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate"::text """.query(using ProductvendorRow.read).unique } + override def upsertBatch(unsaved: List[ProductvendorRow]): Stream[ConnectionIO, ProductvendorRow] = { + Update[ProductvendorRow]( + s"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", 
"lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + values (?::int4,?::int4,?::int4,?::numeric,?::numeric,?::timestamp,?::int4,?::int4,?::int4,?::bpchar,?::timestamp) + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate"::text, "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate"::text""" + )(using ProductvendorRow.write) + .updateManyWithGeneratedKeys[ProductvendorRow]("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductvendorRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductvendorRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productvendor_TEMP (like purchasing.productvendor) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productvendor_TEMP("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductvendorRow.text) + res <- sql"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + select * from productvendor_TEMP + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productvendor_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala index 1a0511bf9..87768946a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala @@ -105,4 +105,23 @@ class ProductvendorRepoMock(toRow: Function1[ProductvendorRowUnsaved, Productven unsaved } } + override def upsertBatch(unsaved: List[ProductvendorRow]): Stream[ConnectionIO, ProductvendorRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this 
functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductvendorRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRow.scala index 7e7919972..70e91b70c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.unitmeasure.UnitmeasureId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -118,4 +119,44 @@ object ProductvendorRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductvendorRow] = new Write[ProductvendorRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.Nullable), + (UnitmeasureId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.businessentityid, x.averageleadtime, x.standardprice, x.lastreceiptcost, x.lastreceiptdate, x.minorderqty, x.maxorderqty, x.onorderqty, x.unitmeasurecode, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 1, a.businessentityid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.averageleadtime) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.standardprice) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 4, a.lastreceiptcost) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 5, a.lastreceiptdate) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 6, a.minorderqty) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 7, a.maxorderqty) + Meta.IntMeta.put.unsafeSetNullable(rs, i + 8, a.onorderqty) + UnitmeasureId.put.unsafeSetNonNullable(rs, i + 9, a.unitmeasurecode) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 10, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 1, a.businessentityid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.averageleadtime) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.standardprice) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 4, a.lastreceiptcost) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 5, a.lastreceiptdate) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.minorderqty) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.maxorderqty) + Meta.IntMeta.put.unsafeUpdateNullable(ps, i + 8, 
a.onorderqty) + UnitmeasureId.put.unsafeUpdateNonNullable(ps, i + 9, a.unitmeasurecode) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 10, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderdetail/PurchaseorderdetailRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderdetail/PurchaseorderdetailRow.scala index a97e0fa6d..403259997 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderdetail/PurchaseorderdetailRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderdetail/PurchaseorderdetailRow.scala @@ -15,6 +15,7 @@ import adventureworks.purchasing.purchaseorderheader.PurchaseorderheaderId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -104,4 +105,38 @@ object PurchaseorderdetailRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PurchaseorderdetailRow] = new Write[PurchaseorderdetailRow]( + puts = List((PurchaseorderheaderId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.purchaseorderid, x.purchaseorderdetailid, x.duedate, x.orderqty, x.productid, x.unitprice, x.receivedqty, x.rejectedqty, x.modifieddate), + unsafeSet = (rs, i, a) => { + PurchaseorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.purchaseorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.purchaseorderdetailid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.duedate) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.orderqty) + ProductId.put.unsafeSetNonNullable(rs, i + 4, a.productid) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 5, a.unitprice) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 6, a.receivedqty) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.rejectedqty) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + PurchaseorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.purchaseorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.purchaseorderdetailid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.duedate) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.orderqty) + ProductId.put.unsafeUpdateNonNullable(ps, i + 4, a.productid) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.unitprice) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.receivedqty) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.rejectedqty) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala index 9d98ca58b..96272c6d1 100644 --- 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala @@ -30,4 +30,7 @@ trait PurchaseorderheaderRepo { def update: UpdateBuilder[PurchaseorderheaderFields, PurchaseorderheaderRow] def update(row: PurchaseorderheaderRow): ConnectionIO[Boolean] def upsert(unsaved: PurchaseorderheaderRow): ConnectionIO[PurchaseorderheaderRow] + def upsertBatch(unsaved: List[PurchaseorderheaderRow]): Stream[ConnectionIO, PurchaseorderheaderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PurchaseorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala index 2231f92f4..abe180c10 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.person.businessentity.BusinessentityId import adventureworks.purchasing.shipmethod.ShipmethodId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -174,4 +176,49 @@ class PurchaseorderheaderRepoImpl extends PurchaseorderheaderRepo { returning "purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate"::text, "shipdate"::text, "subtotal", "taxamt", "freight", "modifieddate"::text """.query(using PurchaseorderheaderRow.read).unique } + override def upsertBatch(unsaved: List[PurchaseorderheaderRow]): Stream[ConnectionIO, PurchaseorderheaderRow] = { + Update[PurchaseorderheaderRow]( + s"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + values (?::int4,?::int2,?::int2,?::int4,?::int4,?::int4,?::timestamp,?::timestamp,?::numeric,?::numeric,?::numeric,?::timestamp) + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + returning "purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate"::text, "shipdate"::text, "subtotal", "taxamt", 
"freight", "modifieddate"::text""" + )(using PurchaseorderheaderRow.write) + .updateManyWithGeneratedKeys[PurchaseorderheaderRow]("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate")(unsaved)(using catsStdInstancesForList, PurchaseorderheaderRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PurchaseorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table purchaseorderheader_TEMP (like purchasing.purchaseorderheader) on commit drop".update.run + _ <- new FragmentOps(sql"""copy purchaseorderheader_TEMP("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PurchaseorderheaderRow.text) + res <- sql"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + select * from purchaseorderheader_TEMP + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table purchaseorderheader_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala index 554a88c92..ac27d9dbd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala @@ -105,4 +105,23 @@ class PurchaseorderheaderRepoMock(toRow: Function1[PurchaseorderheaderRowUnsaved unsaved } } + override def upsertBatch(unsaved: List[PurchaseorderheaderRow]): Stream[ConnectionIO, PurchaseorderheaderRow] = { + Stream.emits { + unsaved.map { row => + map += (row.purchaseorderid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PurchaseorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.purchaseorderid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRow.scala index df28d84b8..7a1f8db94 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRow.scala @@ -15,6 +15,7 @@ import adventureworks.purchasing.shipmethod.ShipmethodId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -128,4 +129,47 @@ object PurchaseorderheaderRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PurchaseorderheaderRow] = new Write[PurchaseorderheaderRow]( + puts = List((PurchaseorderheaderId.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (ShipmethodId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.purchaseorderid, x.revisionnumber, x.status, x.employeeid, x.vendorid, x.shipmethodid, x.orderdate, x.shipdate, x.subtotal, x.taxamt, x.freight, x.modifieddate), + unsafeSet = (rs, i, a) => { + PurchaseorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.purchaseorderid) + TypoShort.put.unsafeSetNonNullable(rs, i + 1, a.revisionnumber) + TypoShort.put.unsafeSetNonNullable(rs, i + 2, a.status) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 3, a.employeeid) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 4, a.vendorid) + ShipmethodId.put.unsafeSetNonNullable(rs, i + 5, a.shipmethodid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.orderdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 7, a.shipdate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 8, a.subtotal) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 9, a.taxamt) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 10, a.freight) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 11, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + PurchaseorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.purchaseorderid) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 1, a.revisionnumber) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 2, a.status) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 3, a.employeeid) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 4, a.vendorid) + ShipmethodId.put.unsafeUpdateNonNullable(ps, i + 5, a.shipmethodid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.orderdate) + 
TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 7, a.shipdate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 8, a.subtotal) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 9, a.taxamt) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 10, a.freight) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 11, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala index 1e0e98212..6e7d70bd4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala @@ -30,4 +30,7 @@ trait ShipmethodRepo { def update: UpdateBuilder[ShipmethodFields, ShipmethodRow] def update(row: ShipmethodRow): ConnectionIO[Boolean] def upsert(unsaved: ShipmethodRow): ConnectionIO[ShipmethodRow] + def upsertBatch(unsaved: List[ShipmethodRow]): Stream[ConnectionIO, ShipmethodRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ShipmethodRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala index 65c02c86c..b2291c283 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -140,4 +142,37 @@ class ShipmethodRepoImpl extends ShipmethodRepo { returning "shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate"::text """.query(using ShipmethodRow.read).unique } + override def upsertBatch(unsaved: List[ShipmethodRow]): Stream[ConnectionIO, ShipmethodRow] = { + Update[ShipmethodRow]( + s"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::numeric,?::numeric,?::uuid,?::timestamp) + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate"::text""" + )(using ShipmethodRow.write) + .updateManyWithGeneratedKeys[ShipmethodRow]("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, 
ShipmethodRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShipmethodRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table shipmethod_TEMP (like purchasing.shipmethod) on commit drop".update.run + _ <- new FragmentOps(sql"""copy shipmethod_TEMP("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ShipmethodRow.text) + res <- sql"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + select * from shipmethod_TEMP + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shipmethod_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala index a9772608d..207da1e7e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala @@ -105,4 +105,23 @@ class ShipmethodRepoMock(toRow: Function1[ShipmethodRowUnsaved, ShipmethodRow], unsaved } } + override def upsertBatch(unsaved: List[ShipmethodRow]): Stream[ConnectionIO, ShipmethodRow] = { + Stream.emits { + unsaved.map { row => + map += (row.shipmethodid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShipmethodRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.shipmethodid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRow.scala index df146d7da..a2ce260cd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -80,4 +81,29 @@ object ShipmethodRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ShipmethodRow] = new Write[ShipmethodRow]( + puts = List((ShipmethodId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.shipmethodid, x.name, x.shipbase, x.shiprate, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ShipmethodId.put.unsafeSetNonNullable(rs, i + 0, a.shipmethodid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.shipbase) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.shiprate) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ShipmethodId.put.unsafeUpdateNonNullable(ps, i + 0, a.shipmethodid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.shipbase) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.shiprate) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala index 00bd84cf6..fa9060713 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala @@ -31,4 +31,7 @@ trait VendorRepo { def update: UpdateBuilder[VendorFields, VendorRow] def update(row: VendorRow): ConnectionIO[Boolean] def upsert(unsaved: VendorRow): ConnectionIO[VendorRow] + def upsertBatch(unsaved: List[VendorRow]): Stream[ConnectionIO, VendorRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, VendorRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala index 2b2288dc6..546dd6368 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.AccountNumber import adventureworks.public.Flag import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -145,4 +147,41 @@ class VendorRepoImpl extends VendorRepo { returning "businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate"::text """.query(using VendorRow.read).unique } + override def upsertBatch(unsaved: List[VendorRow]): Stream[ConnectionIO, VendorRow] = { + Update[VendorRow]( + s"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + values (?::int4,?::varchar,?::varchar,?::int2,?::bool,?::bool,?,?::timestamp) + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate"::text""" + )(using VendorRow.write) + .updateManyWithGeneratedKeys[VendorRow]("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate")(unsaved)(using catsStdInstancesForList, VendorRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, VendorRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table vendor_TEMP (like purchasing.vendor) on commit drop".update.run + _ <- new FragmentOps(sql"""copy vendor_TEMP("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using VendorRow.text) + res <- sql"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + select * from vendor_TEMP + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table vendor_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala index 2f573a7a0..01a0a3fcc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala @@ -106,4 +106,23 @@ class VendorRepoMock(toRow: Function1[VendorRowUnsaved, VendorRow], unsaved } } + override def upsertBatch(unsaved: List[VendorRow]): Stream[ConnectionIO, VendorRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, VendorRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRow.scala index 5cb3c9732..bf93102c6 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRow.scala @@ -17,6 +17,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -94,4 +95,35 @@ object VendorRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[VendorRow] = new Write[VendorRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (AccountNumber.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.accountnumber, x.name, x.creditrating, x.preferredvendorstatus, x.activeflag, x.purchasingwebserviceurl, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + AccountNumber.put.unsafeSetNonNullable(rs, i + 1, a.accountnumber) + Name.put.unsafeSetNonNullable(rs, i + 2, a.name) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.creditrating) + Flag.put.unsafeSetNonNullable(rs, i + 4, a.preferredvendorstatus) + Flag.put.unsafeSetNonNullable(rs, i + 5, a.activeflag) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 6, a.purchasingwebserviceurl) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 7, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + AccountNumber.put.unsafeUpdateNonNullable(ps, i + 1, a.accountnumber) + Name.put.unsafeUpdateNonNullable(ps, i + 2, a.name) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.creditrating) + Flag.put.unsafeUpdateNonNullable(ps, i + 4, a.preferredvendorstatus) + Flag.put.unsafeUpdateNonNullable(ps, i + 5, a.activeflag) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 6, a.purchasingwebserviceurl) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 7, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala index 06424c622..9e970c96e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala @@ -30,4 +30,7 @@ trait CountryregioncurrencyRepo { def update: UpdateBuilder[CountryregioncurrencyFields, CountryregioncurrencyRow] def update(row: 
CountryregioncurrencyRow): ConnectionIO[Boolean] def upsert(unsaved: CountryregioncurrencyRow): ConnectionIO[CountryregioncurrencyRow] + def upsertBatch(unsaved: List[CountryregioncurrencyRow]): Stream[ConnectionIO, CountryregioncurrencyRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregioncurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala index 87e4e74af..7520aef88 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.countryregion.CountryregionId import adventureworks.sales.currency.CurrencyId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -127,4 +129,29 @@ class CountryregioncurrencyRepoImpl extends CountryregioncurrencyRepo { returning "countryregioncode", "currencycode", "modifieddate"::text """.query(using CountryregioncurrencyRow.read).unique } + override def upsertBatch(unsaved: List[CountryregioncurrencyRow]): Stream[ConnectionIO, CountryregioncurrencyRow] = { + Update[CountryregioncurrencyRow]( + s"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + values (?,?::bpchar,?::timestamp) + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "countryregioncode", "currencycode", "modifieddate"::text""" + )(using CountryregioncurrencyRow.write) + .updateManyWithGeneratedKeys[CountryregioncurrencyRow]("countryregioncode", "currencycode", "modifieddate")(unsaved)(using catsStdInstancesForList, CountryregioncurrencyRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregioncurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table countryregioncurrency_TEMP (like sales.countryregioncurrency) on commit drop".update.run + _ <- new FragmentOps(sql"""copy countryregioncurrency_TEMP("countryregioncode", "currencycode", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CountryregioncurrencyRow.text) + res <- sql"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + select * from countryregioncurrency_TEMP + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregioncurrency_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala index 3076e5a2d..4782a3ced 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala @@ -105,4 +105,23 @@ class CountryregioncurrencyRepoMock(toRow: Function1[CountryregioncurrencyRowUns unsaved } } + override def upsertBatch(unsaved: List[CountryregioncurrencyRow]): Stream[ConnectionIO, CountryregioncurrencyRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregioncurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRow.scala index dcd7215d4..665289f12 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRow.scala @@ -14,6 +14,7 @@ import adventureworks.sales.currency.CurrencyId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -61,4 +62,20 @@ object CountryregioncurrencyRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CountryregioncurrencyRow] = new Write[CountryregioncurrencyRow]( + puts = List((CountryregionId.put, Nullability.NoNulls), + (CurrencyId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.countryregioncode, x.currencycode, x.modifieddate), + unsafeSet = (rs, i, a) => { + CountryregionId.put.unsafeSetNonNullable(rs, i + 0, a.countryregioncode) + CurrencyId.put.unsafeSetNonNullable(rs, i + 1, a.currencycode) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 
2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CountryregionId.put.unsafeUpdateNonNullable(ps, i + 0, a.countryregioncode) + CurrencyId.put.unsafeUpdateNonNullable(ps, i + 1, a.currencycode) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala index a9f293d7a..0fb195f7e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala @@ -32,4 +32,7 @@ trait CreditcardRepo { def update: UpdateBuilder[CreditcardFields, CreditcardRow] def update(row: CreditcardRow): ConnectionIO[Boolean] def upsert(unsaved: CreditcardRow): ConnectionIO[CreditcardRow] + def upsertBatch(unsaved: List[CreditcardRow]): Stream[ConnectionIO, CreditcardRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala index 1dc0c0d38..ce86cfb94 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.userdefined.CustomCreditcardId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.util.Put import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,37 @@ class CreditcardRepoImpl extends CreditcardRepo { returning "creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate"::text """.query(using CreditcardRow.read).unique } + override def upsertBatch(unsaved: List[CreditcardRow]): Stream[ConnectionIO, CreditcardRow] = { + Update[CreditcardRow]( + s"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + values (?::int4,?,?,?::int2,?::int2,?::timestamp) + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + returning "creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate"::text""" + )(using CreditcardRow.write) + .updateManyWithGeneratedKeys[CreditcardRow]("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate")(unsaved)(using catsStdInstancesForList, CreditcardRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
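Unlike `upsertBatch` above, which streams the upserted rows back via `updateManyWithGeneratedKeys`, this COPY-based variant only returns a row count; like the batch variant it must run in one transaction, since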
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table creditcard_TEMP (like sales.creditcard) on commit drop".update.run + _ <- new FragmentOps(sql"""copy creditcard_TEMP("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CreditcardRow.text) + res <- sql"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + select * from creditcard_TEMP + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table creditcard_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala index 64fab3b9a..57b19c0ac 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala @@ -107,4 +107,23 @@ class CreditcardRepoMock(toRow: Function1[CreditcardRowUnsaved, CreditcardRow], unsaved } } + override def upsertBatch(unsaved: List[CreditcardRow]): Stream[ConnectionIO, CreditcardRow] = { + Stream.emits { + unsaved.map { row => + map += (row.creditcardid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.creditcardid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRow.scala index 60edaa940..fdd5c5943 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRow.scala @@ -14,6 +14,7 @@ import adventureworks.userdefined.CustomCreditcardId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -76,4 +77,29 @@ object CreditcardRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CreditcardRow] = new Write[CreditcardRow]( + puts = List((/* user-picked */ CustomCreditcardId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.creditcardid, x.cardtype, x.cardnumber, x.expmonth, x.expyear, x.modifieddate), + unsafeSet = (rs, i, a) => { + /* user-picked */ CustomCreditcardId.put.unsafeSetNonNullable(rs, i + 0, a.creditcardid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, 
a.cardtype) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.cardnumber) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.expmonth) + TypoShort.put.unsafeSetNonNullable(rs, i + 4, a.expyear) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + /* user-picked */ CustomCreditcardId.put.unsafeUpdateNonNullable(ps, i + 0, a.creditcardid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.cardtype) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.cardnumber) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.expmonth) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 4, a.expyear) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala index f9c279e71..c62d5c054 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala @@ -30,4 +30,7 @@ trait CurrencyRepo { def update: UpdateBuilder[CurrencyFields, CurrencyRow] def update(row: CurrencyRow): ConnectionIO[Boolean] def upsert(unsaved: CurrencyRow): ConnectionIO[CurrencyRow] + def upsertBatch(unsaved: List[CurrencyRow]): Stream[ConnectionIO, CurrencyRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala index 54c8e8d0c..9f4ba2c3a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala @@ -10,12 +10,14 @@ package currency import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -114,4 +116,31 @@ class CurrencyRepoImpl extends CurrencyRepo { returning "currencycode", "name", "modifieddate"::text """.query(using CurrencyRow.read).unique } + override def upsertBatch(unsaved: List[CurrencyRow]): Stream[ConnectionIO, CurrencyRow] = { + Update[CurrencyRow]( + s"""insert into sales.currency("currencycode", "name", "modifieddate") + values (?::bpchar,?::varchar,?::timestamp) + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "currencycode", "name", "modifieddate"::text""" + )(using CurrencyRow.write) + .updateManyWithGeneratedKeys[CurrencyRow]("currencycode", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, CurrencyRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table currency_TEMP (like sales.currency) on commit drop".update.run + _ <- new FragmentOps(sql"""copy currency_TEMP("currencycode", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CurrencyRow.text) + res <- sql"""insert into sales.currency("currencycode", "name", "modifieddate") + select * from currency_TEMP + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currency_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala index eb3d43fa1..c125ff797 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala @@ -105,4 +105,23 @@ class CurrencyRepoMock(toRow: Function1[CurrencyRowUnsaved, CurrencyRow], unsaved } } + override def upsertBatch(unsaved: List[CurrencyRow]): Stream[ConnectionIO, CurrencyRow] = { + Stream.emits { + unsaved.map { row => + map += (row.currencycode -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.currencycode -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRow.scala index 286b6a769..7a90d8eae 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -55,4 +56,20 @@ object CurrencyRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CurrencyRow] = new Write[CurrencyRow]( + puts = List((CurrencyId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.currencycode, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + CurrencyId.put.unsafeSetNonNullable(rs, i + 0, a.currencycode) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CurrencyId.put.unsafeUpdateNonNullable(ps, i + 0, a.currencycode) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala index 22153e1ec..3105dbd32 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala @@ -30,4 +30,7 @@ trait CurrencyrateRepo { def update: UpdateBuilder[CurrencyrateFields, CurrencyrateRow] def update(row: CurrencyrateRow): ConnectionIO[Boolean] def upsert(unsaved: CurrencyrateRow): ConnectionIO[CurrencyrateRow] + def upsertBatch(unsaved: List[CurrencyrateRow]): Stream[ConnectionIO, CurrencyrateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyrateRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala index 1e30d6065..013d44dba 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala @@ -10,6 +10,7 @@ package currencyrate import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.sales.currency.CurrencyId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -134,4 +136,39 @@ class CurrencyrateRepoImpl extends CurrencyrateRepo { returning "currencyrateid", "currencyratedate"::text, "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate"::text """.query(using CurrencyrateRow.read).unique } + override def upsertBatch(unsaved: List[CurrencyrateRow]): Stream[ConnectionIO, CurrencyrateRow] = { + Update[CurrencyrateRow]( + s"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + values (?::int4,?::timestamp,?::bpchar,?::bpchar,?::numeric,?::numeric,?::timestamp) + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + returning "currencyrateid", "currencyratedate"::text, "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate"::text""" + )(using CurrencyrateRow.write) + .updateManyWithGeneratedKeys[CurrencyrateRow]("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate")(unsaved)(using catsStdInstancesForList, CurrencyrateRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyrateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table currencyrate_TEMP (like sales.currencyrate) on commit drop".update.run + _ <- new FragmentOps(sql"""copy currencyrate_TEMP("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CurrencyrateRow.text) + res <- sql"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + select * from currencyrate_TEMP + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currencyrate_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala index b40d4aede..628090109 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala @@ -105,4 +105,23 @@ class CurrencyrateRepoMock(toRow: Function1[CurrencyrateRowUnsaved, Currencyrate unsaved } } + override def upsertBatch(unsaved: List[CurrencyrateRow]): Stream[ConnectionIO, CurrencyrateRow] = { + Stream.emits { + unsaved.map { row => + map += (row.currencyrateid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyrateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.currencyrateid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRow.scala index af99a6630..d3534f002 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRow.scala @@ -13,6 +13,7 @@ import adventureworks.sales.currency.CurrencyId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -83,4 +84,32 @@ object CurrencyrateRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CurrencyrateRow] = new Write[CurrencyrateRow]( + puts = List((CurrencyrateId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (CurrencyId.put, Nullability.NoNulls), + (CurrencyId.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.currencyrateid, x.currencyratedate, x.fromcurrencycode, x.tocurrencycode, x.averagerate, x.endofdayrate, x.modifieddate), + unsafeSet = (rs, i, a) => { + CurrencyrateId.put.unsafeSetNonNullable(rs, i + 0, a.currencyrateid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.currencyratedate) + CurrencyId.put.unsafeSetNonNullable(rs, i + 2, a.fromcurrencycode) + CurrencyId.put.unsafeSetNonNullable(rs, i + 3, a.tocurrencycode) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 4, a.averagerate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 5, a.endofdayrate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CurrencyrateId.put.unsafeUpdateNonNullable(ps, i + 0, a.currencyrateid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.currencyratedate) + CurrencyId.put.unsafeUpdateNonNullable(ps, i + 2, a.fromcurrencycode) + CurrencyId.put.unsafeUpdateNonNullable(ps, i + 3, a.tocurrencycode) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.averagerate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.endofdayrate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala index dac21df46..77c17afec 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala @@ -30,4 +30,7 @@ trait CustomerRepo { def update: UpdateBuilder[CustomerFields, CustomerRow] def update(row: CustomerRow): ConnectionIO[Boolean] def upsert(unsaved: CustomerRow): ConnectionIO[CustomerRow] + def upsertBatch(unsaved: List[CustomerRow]): Stream[ConnectionIO, 
CustomerRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CustomerRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala index 5c18b33cc..4d87d1834 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -134,4 +136,37 @@ class CustomerRepoImpl extends CustomerRepo { returning "customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate"::text """.query(using CustomerRow.read).unique } + override def upsertBatch(unsaved: List[CustomerRow]): Stream[ConnectionIO, CustomerRow] = { + Update[CustomerRow]( + s"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + values (?::int4,?::int4,?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate"::text""" + )(using CustomerRow.write) + .updateManyWithGeneratedKeys[CustomerRow]("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, CustomerRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
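A minimal sketch of safe usage, with illustrative names only (`xa` for a doobie Transactor and `rows` for an fs2 Stream of CustomerRow, neither defined in this file):
  (new CustomerRepoImpl).upsertStreaming(rows).transact(xa)
`transact` runs the whole program in one transaction, which is required because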
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CustomerRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table customer_TEMP (like sales.customer) on commit drop".update.run + _ <- new FragmentOps(sql"""copy customer_TEMP("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CustomerRow.text) + res <- sql"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + select * from customer_TEMP + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table customer_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala index bd608e128..663c846b1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala @@ -105,4 +105,23 @@ class CustomerRepoMock(toRow: Function1[CustomerRowUnsaved, CustomerRow], unsaved } } + override def upsertBatch(unsaved: List[CustomerRow]): Stream[ConnectionIO, CustomerRow] = { + Stream.emits { + unsaved.map { row => + map += (row.customerid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CustomerRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.customerid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRow.scala index fa1714e21..4016a757d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRow.scala @@ -15,6 +15,7 @@ import adventureworks.sales.salesterritory.SalesterritoryId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -79,4 +80,29 @@ object CustomerRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CustomerRow] = new Write[CustomerRow]( + puts = List((CustomerId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.Nullable), + (BusinessentityId.put, Nullability.Nullable), + (SalesterritoryId.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.customerid, x.personid, x.storeid, x.territoryid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + CustomerId.put.unsafeSetNonNullable(rs, i + 0, a.customerid) + BusinessentityId.put.unsafeSetNullable(rs, i + 1, a.personid) + BusinessentityId.put.unsafeSetNullable(rs, i + 2, a.storeid) + SalesterritoryId.put.unsafeSetNullable(rs, i + 3, 
a.territoryid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CustomerId.put.unsafeUpdateNonNullable(ps, i + 0, a.customerid) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 1, a.personid) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 2, a.storeid) + SalesterritoryId.put.unsafeUpdateNullable(ps, i + 3, a.territoryid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala index de08af799..839ecd4cb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala @@ -32,4 +32,7 @@ trait PersoncreditcardRepo { def update: UpdateBuilder[PersoncreditcardFields, PersoncreditcardRow] def update(row: PersoncreditcardRow): ConnectionIO[Boolean] def upsert(unsaved: PersoncreditcardRow): ConnectionIO[PersoncreditcardRow] + def upsertBatch(unsaved: List[PersoncreditcardRow]): Stream[ConnectionIO, PersoncreditcardRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersoncreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala index 4fe9f85a0..fb9dcd839 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.userdefined.CustomCreditcardId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Put import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -128,4 +130,29 @@ class PersoncreditcardRepoImpl extends PersoncreditcardRepo { returning "businessentityid", "creditcardid", "modifieddate"::text """.query(using PersoncreditcardRow.read).unique } + override def upsertBatch(unsaved: List[PersoncreditcardRow]): Stream[ConnectionIO, PersoncreditcardRow] = { + Update[PersoncreditcardRow]( + s"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + values (?::int4,?::int4,?::timestamp) + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "creditcardid", "modifieddate"::text""" + )(using 
PersoncreditcardRow.write) + .updateManyWithGeneratedKeys[PersoncreditcardRow]("businessentityid", "creditcardid", "modifieddate")(unsaved)(using catsStdInstancesForList, PersoncreditcardRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersoncreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table personcreditcard_TEMP (like sales.personcreditcard) on commit drop".update.run + _ <- new FragmentOps(sql"""copy personcreditcard_TEMP("businessentityid", "creditcardid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PersoncreditcardRow.text) + res <- sql"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + select * from personcreditcard_TEMP + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personcreditcard_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala index 6e43d0bcc..0a30ccdea 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala @@ -107,4 +107,23 @@ class PersoncreditcardRepoMock(toRow: Function1[PersoncreditcardRowUnsaved, Pers unsaved } } + override def upsertBatch(unsaved: List[PersoncreditcardRow]): Stream[ConnectionIO, PersoncreditcardRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersoncreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRow.scala index 21c36be43..f8cda5a84 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRow.scala @@ -14,6 +14,7 @@ import adventureworks.userdefined.CustomCreditcardId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -61,4 +62,20 @@ object PersoncreditcardRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PersoncreditcardRow] = new Write[PersoncreditcardRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (/* user-picked */ CustomCreditcardId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.creditcardid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + /* user-picked */ CustomCreditcardId.put.unsafeSetNonNullable(rs, i + 1, a.creditcardid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + /* user-picked */ CustomCreditcardId.put.unsafeUpdateNonNullable(ps, i + 1, a.creditcardid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala index 6487ee645..5d21a35c2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala @@ -30,4 +30,7 @@ trait SalesorderdetailRepo { def update: UpdateBuilder[SalesorderdetailFields, SalesorderdetailRow] def update(row: SalesorderdetailRow): ConnectionIO[Boolean] def upsert(unsaved: SalesorderdetailRow): ConnectionIO[SalesorderdetailRow] + def upsertBatch(unsaved: List[SalesorderdetailRow]): Stream[ConnectionIO, SalesorderdetailRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderdetailRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala index 6412a597c..26a12afbe 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoUUID import adventureworks.production.product.ProductId import adventureworks.sales.salesorderheader.SalesorderheaderId import adventureworks.sales.specialoffer.SpecialofferId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -168,4 +170,43 @@ class SalesorderdetailRepoImpl extends SalesorderdetailRepo { returning "salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate"::text """.query(using SalesorderdetailRow.read).unique } + override def upsertBatch(unsaved: List[SalesorderdetailRow]): Stream[ConnectionIO, SalesorderdetailRow] = { + Update[SalesorderdetailRow]( + s"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + values (?::int4,?::int4,?,?::int2,?::int4,?::int4,?::numeric,?::numeric,?::uuid,?::timestamp) + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate"::text""" + )(using SalesorderdetailRow.write) + .updateManyWithGeneratedKeys[SalesorderdetailRow]("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesorderdetailRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderdetailRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesorderdetail_TEMP (like sales.salesorderdetail) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesorderdetail_TEMP("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesorderdetailRow.text) + res <- sql"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + select * from salesorderdetail_TEMP + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderdetail_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala index 54cae19ef..6317331f8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala @@ -105,4 +105,23 @@ class SalesorderdetailRepoMock(toRow: Function1[SalesorderdetailRowUnsaved, Sale unsaved } } + override def upsertBatch(unsaved: List[SalesorderdetailRow]): Stream[ConnectionIO, SalesorderdetailRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderdetailRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRow.scala index 01b271263..2133b3979 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRow.scala @@ -18,6 +18,7 @@ import adventureworks.sales.specialofferproduct.SpecialofferproductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -118,4 +119,41 @@ object SalesorderdetailRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesorderdetailRow] = new Write[SalesorderdetailRow]( + puts = List((SalesorderheaderId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoShort.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (SpecialofferId.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salesorderid, x.salesorderdetailid, x.carriertrackingnumber, x.orderqty, x.productid, x.specialofferid, x.unitprice, x.unitpricediscount, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.salesorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.salesorderdetailid) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.carriertrackingnumber) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.orderqty) + ProductId.put.unsafeSetNonNullable(rs, i + 4, a.productid) + SpecialofferId.put.unsafeSetNonNullable(rs, i + 5, a.specialofferid) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 6, a.unitprice) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.unitpricediscount) + TypoUUID.put.unsafeSetNonNullable(rs, i + 8, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 9, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.salesorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.salesorderdetailid) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.carriertrackingnumber) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.orderqty) + ProductId.put.unsafeUpdateNonNullable(ps, i + 4, a.productid) + SpecialofferId.put.unsafeUpdateNonNullable(ps, i + 5, a.specialofferid) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.unitprice) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.unitpricediscount) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 8, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 9, a.modifieddate) + } + ) } diff --git 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala index 1b3c94bdc..bf8080cbf 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala @@ -30,4 +30,7 @@ trait SalesorderheaderRepo { def update: UpdateBuilder[SalesorderheaderFields, SalesorderheaderRow] def update(row: SalesorderheaderRow): ConnectionIO[Boolean] def upsert(unsaved: SalesorderheaderRow): ConnectionIO[SalesorderheaderRow] + def upsertBatch(unsaved: List[SalesorderheaderRow]): Stream[ConnectionIO, SalesorderheaderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala index 8849fb8e6..9fcebe09b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala @@ -21,6 +21,7 @@ import adventureworks.sales.currencyrate.CurrencyrateId import adventureworks.sales.customer.CustomerId import adventureworks.sales.salesterritory.SalesterritoryId import adventureworks.userdefined.CustomCreditcardId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -28,6 +29,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -241,4 +243,75 @@ class SalesorderheaderRepoImpl extends SalesorderheaderRepo { returning "salesorderid", "revisionnumber", "orderdate"::text, "duedate"::text, "shipdate"::text, "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate"::text """.query(using SalesorderheaderRow.read).unique } + override def upsertBatch(unsaved: List[SalesorderheaderRow]): Stream[ConnectionIO, SalesorderheaderRow] = { + Update[SalesorderheaderRow]( + s"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + values 
(?::int4,?::int2,?::timestamp,?::timestamp,?::timestamp,?::int2,?::bool,?::varchar,?::varchar,?::int4,?::int4,?::int4,?::int4,?::int4,?::int4,?::int4,?,?::int4,?::numeric,?::numeric,?::numeric,?::numeric,?,?::uuid,?::timestamp) + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + "shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "revisionnumber", "orderdate"::text, "duedate"::text, "shipdate"::text, "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate"::text""" + )(using SalesorderheaderRow.write) + .updateManyWithGeneratedKeys[SalesorderheaderRow]("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesorderheaderRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
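`batchSize` only controls how the incoming stream is chunked for the COPY into the temporary table; the upsert itself is done with a single `insert ... select`. A surrounding transaction is still required because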
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesorderheader_TEMP (like sales.salesorderheader) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesorderheader_TEMP("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesorderheaderRow.text) + res <- sql"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + select * from salesorderheader_TEMP + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + "shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheader_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala index 5fb933764..86a1fdfbd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala @@ -105,4 +105,23 @@ class SalesorderheaderRepoMock(toRow: Function1[SalesorderheaderRowUnsaved, Sale unsaved } } + override def upsertBatch(unsaved: List[SalesorderheaderRow]): Stream[ConnectionIO, SalesorderheaderRow] = { + Stream.emits { + unsaved.map { row => + map += (row.salesorderid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.salesorderid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRow.scala index 4ff377276..aa30a5545 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRow.scala @@ -24,6 +24,7 @@ import adventureworks.userdefined.CustomCreditcardId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.DecodingFailure @@ -289,4 +290,86 @@ object SalesorderheaderRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesorderheaderRow] = new Write[SalesorderheaderRow]( + puts = List((SalesorderheaderId.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoShort.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (OrderNumber.put, Nullability.Nullable), + (AccountNumber.put, Nullability.Nullable), + (CustomerId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.Nullable), + (SalesterritoryId.put, Nullability.Nullable), + (AddressId.put, Nullability.NoNulls), + (AddressId.put, Nullability.NoNulls), + (ShipmethodId.put, Nullability.NoNulls), + (/* user-picked */ CustomCreditcardId.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (CurrencyrateId.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salesorderid, x.revisionnumber, x.orderdate, x.duedate, x.shipdate, x.status, x.onlineorderflag, x.purchaseordernumber, x.accountnumber, x.customerid, x.salespersonid, x.territoryid, x.billtoaddressid, x.shiptoaddressid, x.shipmethodid, x.creditcardid, x.creditcardapprovalcode, x.currencyrateid, x.subtotal, x.taxamt, x.freight, x.totaldue, x.comment, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.salesorderid) + TypoShort.put.unsafeSetNonNullable(rs, i + 1, a.revisionnumber) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.orderdate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.duedate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 4, a.shipdate) + TypoShort.put.unsafeSetNonNullable(rs, i + 5, a.status) + Flag.put.unsafeSetNonNullable(rs, i + 6, a.onlineorderflag) + OrderNumber.put.unsafeSetNullable(rs, i + 7, a.purchaseordernumber) + AccountNumber.put.unsafeSetNullable(rs, i + 8, a.accountnumber) + CustomerId.put.unsafeSetNonNullable(rs, i + 9, 
a.customerid) + BusinessentityId.put.unsafeSetNullable(rs, i + 10, a.salespersonid) + SalesterritoryId.put.unsafeSetNullable(rs, i + 11, a.territoryid) + AddressId.put.unsafeSetNonNullable(rs, i + 12, a.billtoaddressid) + AddressId.put.unsafeSetNonNullable(rs, i + 13, a.shiptoaddressid) + ShipmethodId.put.unsafeSetNonNullable(rs, i + 14, a.shipmethodid) + /* user-picked */ CustomCreditcardId.put.unsafeSetNullable(rs, i + 15, a.creditcardid) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 16, a.creditcardapprovalcode) + CurrencyrateId.put.unsafeSetNullable(rs, i + 17, a.currencyrateid) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 18, a.subtotal) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 19, a.taxamt) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 20, a.freight) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 21, a.totaldue) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 22, a.comment) + TypoUUID.put.unsafeSetNonNullable(rs, i + 23, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 24, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.salesorderid) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 1, a.revisionnumber) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.orderdate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.duedate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 4, a.shipdate) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 5, a.status) + Flag.put.unsafeUpdateNonNullable(ps, i + 6, a.onlineorderflag) + OrderNumber.put.unsafeUpdateNullable(ps, i + 7, a.purchaseordernumber) + AccountNumber.put.unsafeUpdateNullable(ps, i + 8, a.accountnumber) + CustomerId.put.unsafeUpdateNonNullable(ps, i + 9, a.customerid) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 10, a.salespersonid) + SalesterritoryId.put.unsafeUpdateNullable(ps, i + 11, a.territoryid) + AddressId.put.unsafeUpdateNonNullable(ps, i + 12, a.billtoaddressid) + AddressId.put.unsafeUpdateNonNullable(ps, i + 13, a.shiptoaddressid) + ShipmethodId.put.unsafeUpdateNonNullable(ps, i + 14, a.shipmethodid) + /* user-picked */ CustomCreditcardId.put.unsafeUpdateNullable(ps, i + 15, a.creditcardid) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 16, a.creditcardapprovalcode) + CurrencyrateId.put.unsafeUpdateNullable(ps, i + 17, a.currencyrateid) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 18, a.subtotal) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 19, a.taxamt) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 20, a.freight) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 21, a.totaldue) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 22, a.comment) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 23, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 24, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala index 8864be325..4a1f57e4c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala @@ -30,4 +30,7 @@ trait 
SalesorderheadersalesreasonRepo { def update: UpdateBuilder[SalesorderheadersalesreasonFields, SalesorderheadersalesreasonRow] def update(row: SalesorderheadersalesreasonRow): ConnectionIO[Boolean] def upsert(unsaved: SalesorderheadersalesreasonRow): ConnectionIO[SalesorderheadersalesreasonRow] + def upsertBatch(unsaved: List[SalesorderheadersalesreasonRow]): Stream[ConnectionIO, SalesorderheadersalesreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala index 52dcdb104..4877c01ed 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.sales.salesorderheader.SalesorderheaderId import adventureworks.sales.salesreason.SalesreasonId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -127,4 +129,29 @@ class SalesorderheadersalesreasonRepoImpl extends SalesorderheadersalesreasonRep returning "salesorderid", "salesreasonid", "modifieddate"::text """.query(using SalesorderheadersalesreasonRow.read).unique } + override def upsertBatch(unsaved: List[SalesorderheadersalesreasonRow]): Stream[ConnectionIO, SalesorderheadersalesreasonRow] = { + Update[SalesorderheadersalesreasonRow]( + s"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + values (?::int4,?::int4,?::timestamp) + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "salesreasonid", "modifieddate"::text""" + )(using SalesorderheadersalesreasonRow.write) + .updateManyWithGeneratedKeys[SalesorderheadersalesreasonRow]("salesorderid", "salesreasonid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesorderheadersalesreasonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
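A second sketch, likewise not part of the generated sources: `upsertBatch` hands back the upserted rows as a `Stream[ConnectionIO, *]`, so the caller decides how to consume it; here, assuming the same hypothetical `xa` and `rows`, the stream is compiled to a `List` (as the generated mocks also do) and run in one transaction.
import adventureworks.sales.salesorderheadersalesreason.SalesorderheadersalesreasonRepoImpl
import adventureworks.sales.salesorderheadersalesreason.SalesorderheadersalesreasonRow
import cats.effect.IO
import doobie.implicits.*
import doobie.util.transactor.Transactor

// Hypothetical caller: `xa` and `rows` are assumed names, not generated ones.
def upsertSalesReasons(xa: Transactor[IO], rows: List[SalesorderheadersalesreasonRow]): IO[List[SalesorderheadersalesreasonRow]] =
  (new SalesorderheadersalesreasonRepoImpl)
    .upsertBatch(rows) // Stream[ConnectionIO, SalesorderheadersalesreasonRow]
    .compile.toList    // ConnectionIO[List[SalesorderheadersalesreasonRow]]
    .transact(xa)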
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesorderheadersalesreason_TEMP (like sales.salesorderheadersalesreason) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesorderheadersalesreason_TEMP("salesorderid", "salesreasonid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesorderheadersalesreasonRow.text) + res <- sql"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + select * from salesorderheadersalesreason_TEMP + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheadersalesreason_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala index ad6c68541..38aa0b63a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala @@ -105,4 +105,23 @@ class SalesorderheadersalesreasonRepoMock(toRow: Function1[Salesorderheadersales unsaved } } + override def upsertBatch(unsaved: List[SalesorderheadersalesreasonRow]): Stream[ConnectionIO, SalesorderheadersalesreasonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRow.scala index e146435aa..5f57902b2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRow.scala @@ -14,6 +14,7 @@ import adventureworks.sales.salesreason.SalesreasonId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -61,4 +62,20 @@ object SalesorderheadersalesreasonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesorderheadersalesreasonRow] = new Write[SalesorderheadersalesreasonRow]( + puts = List((SalesorderheaderId.put, Nullability.NoNulls), + (SalesreasonId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salesorderid, x.salesreasonid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.salesorderid) + SalesreasonId.put.unsafeSetNonNullable(rs, i + 1, a.salesreasonid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.salesorderid) + SalesreasonId.put.unsafeUpdateNonNullable(ps, i + 1, a.salesreasonid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala index 341a51233..235a11a7b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala @@ -31,4 +31,7 @@ trait SalespersonRepo { def update: UpdateBuilder[SalespersonFields, SalespersonRow] def update(row: SalespersonRow): ConnectionIO[Boolean] def upsert(unsaved: SalespersonRow): ConnectionIO[SalespersonRow] + def upsertBatch(unsaved: List[SalespersonRow]): Stream[ConnectionIO, SalespersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala index 4f75c5948..0bc71f5cc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -156,4 +158,43 @@ class SalespersonRepoImpl extends SalespersonRepo { returning "businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate"::text """.query(using SalespersonRow.read).unique } + override def upsertBatch(unsaved: List[SalespersonRow]): Stream[ConnectionIO, SalespersonRow] = { + Update[SalespersonRow]( + s"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + values (?::int4,?::int4,?::numeric,?::numeric,?::numeric,?::numeric,?::numeric,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate"::text""" + )(using SalespersonRow.write) + .updateManyWithGeneratedKeys[SalespersonRow]("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalespersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesperson_TEMP (like sales.salesperson) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesperson_TEMP("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalespersonRow.text) + res <- sql"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + select * from salesperson_TEMP + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesperson_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala index e75ed30d6..2277c0e3d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala @@ -106,4 +106,23 @@ class SalespersonRepoMock(toRow: Function1[SalespersonRowUnsaved, SalespersonRow unsaved } } + override def upsertBatch(unsaved: List[SalespersonRow]): Stream[ConnectionIO, SalespersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRow.scala index fbbf538e5..b1c375bf2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRow.scala @@ -15,6 +15,7 @@ import adventureworks.sales.salesterritory.SalesterritoryId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -105,4 +106,38 @@ object SalespersonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalespersonRow] = new Write[SalespersonRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (SalesterritoryId.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.territoryid, x.salesquota, x.bonus, x.commissionpct, x.salesytd, x.saleslastyear, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + SalesterritoryId.put.unsafeSetNullable(rs, i + 1, a.territoryid) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 2, a.salesquota) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.bonus) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 4, a.commissionpct) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 5, a.salesytd) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 6, a.saleslastyear) + TypoUUID.put.unsafeSetNonNullable(rs, i + 7, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + SalesterritoryId.put.unsafeUpdateNullable(ps, i + 1, a.territoryid) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 2, a.salesquota) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.bonus) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.commissionpct) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.salesytd) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.saleslastyear) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 7, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala index 79628947d..785bc64e1 
100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala @@ -30,4 +30,7 @@ trait SalespersonquotahistoryRepo { def update: UpdateBuilder[SalespersonquotahistoryFields, SalespersonquotahistoryRow] def update(row: SalespersonquotahistoryRow): ConnectionIO[Boolean] def upsert(unsaved: SalespersonquotahistoryRow): ConnectionIO[SalespersonquotahistoryRow] + def upsertBatch(unsaved: List[SalespersonquotahistoryRow]): Stream[ConnectionIO, SalespersonquotahistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonquotahistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala index a7d71b791..c82662bbd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -139,4 +141,33 @@ class SalespersonquotahistoryRepoImpl extends SalespersonquotahistoryRepo { returning "businessentityid", "quotadate"::text, "salesquota", "rowguid", "modifieddate"::text """.query(using SalespersonquotahistoryRow.read).unique } + override def upsertBatch(unsaved: List[SalespersonquotahistoryRow]): Stream[ConnectionIO, SalespersonquotahistoryRow] = { + Update[SalespersonquotahistoryRow]( + s"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + values (?::int4,?::timestamp,?::numeric,?::uuid,?::timestamp) + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "quotadate"::text, "salesquota", "rowguid", "modifieddate"::text""" + )(using SalespersonquotahistoryRow.write) + .updateManyWithGeneratedKeys[SalespersonquotahistoryRow]("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalespersonquotahistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonquotahistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salespersonquotahistory_TEMP (like sales.salespersonquotahistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salespersonquotahistory_TEMP("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalespersonquotahistoryRow.text) + res <- sql"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + select * from salespersonquotahistory_TEMP + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salespersonquotahistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala index 98fe9ed52..90cdc494b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala @@ -105,4 +105,23 @@ class SalespersonquotahistoryRepoMock(toRow: Function1[SalespersonquotahistoryRo unsaved } } + override def upsertBatch(unsaved: List[SalespersonquotahistoryRow]): Stream[ConnectionIO, SalespersonquotahistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonquotahistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRow.scala index 77368f15e..8b93767d3 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -74,4 +75,26 @@ object SalespersonquotahistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalespersonquotahistoryRow] = new Write[SalespersonquotahistoryRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.quotadate, x.salesquota, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.quotadate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.salesquota) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.quotadate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.salesquota) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala index 8a2230c8a..248f1a2ae 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala @@ -30,4 +30,7 @@ trait SalesreasonRepo { def update: UpdateBuilder[SalesreasonFields, SalesreasonRow] def update(row: SalesreasonRow): ConnectionIO[Boolean] def upsert(unsaved: SalesreasonRow): ConnectionIO[SalesreasonRow] + def upsertBatch(unsaved: List[SalesreasonRow]): Stream[ConnectionIO, SalesreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala index 17475479b..ef8ce9f08 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala @@ -10,12 +10,14 @@ package salesreason import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -121,4 +123,33 @@ class SalesreasonRepoImpl extends SalesreasonRepo { returning "salesreasonid", "name", "reasontype", "modifieddate"::text """.query(using SalesreasonRow.read).unique } + override def upsertBatch(unsaved: List[SalesreasonRow]): Stream[ConnectionIO, SalesreasonRow] = { + Update[SalesreasonRow]( + s"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + values (?::int4,?::varchar,?::varchar,?::timestamp) + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesreasonid", "name", "reasontype", "modifieddate"::text""" + )(using SalesreasonRow.write) + .updateManyWithGeneratedKeys[SalesreasonRow]("salesreasonid", "name", "reasontype", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesreasonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesreason_TEMP (like sales.salesreason) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesreason_TEMP("salesreasonid", "name", "reasontype", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesreasonRow.text) + res <- sql"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + select * from salesreason_TEMP + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesreason_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala index a06455483..484040395 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala @@ -105,4 +105,23 @@ class SalesreasonRepoMock(toRow: Function1[SalesreasonRowUnsaved, SalesreasonRow unsaved } } + override def upsertBatch(unsaved: List[SalesreasonRow]): Stream[ConnectionIO, SalesreasonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.salesreasonid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.salesreasonid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRow.scala index 2dfbce4a6..17ffe51cf 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -62,4 +63,23 @@ object SalesreasonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesreasonRow] = new Write[SalesreasonRow]( + puts = List((SalesreasonId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salesreasonid, x.name, x.reasontype, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesreasonId.put.unsafeSetNonNullable(rs, i + 0, a.salesreasonid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Name.put.unsafeSetNonNullable(rs, i + 2, a.reasontype) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesreasonId.put.unsafeUpdateNonNullable(ps, i + 0, a.salesreasonid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + 
Name.put.unsafeUpdateNonNullable(ps, i + 2, a.reasontype) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala index 52e375556..2c9b1795b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala @@ -30,4 +30,7 @@ trait SalestaxrateRepo { def update: UpdateBuilder[SalestaxrateFields, SalestaxrateRow] def update(row: SalestaxrateRow): ConnectionIO[Boolean] def upsert(unsaved: SalestaxrateRow): ConnectionIO[SalestaxrateRow] + def upsertBatch(unsaved: List[SalestaxrateRow]): Stream[ConnectionIO, SalestaxrateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalestaxrateRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala index cf414d28a..740ed59d2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.stateprovince.StateprovinceId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -20,6 +21,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -143,4 +145,39 @@ class SalestaxrateRepoImpl extends SalestaxrateRepo { returning "salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate"::text """.query(using SalestaxrateRow.read).unique } + override def upsertBatch(unsaved: List[SalestaxrateRow]): Stream[ConnectionIO, SalestaxrateRow] = { + Update[SalestaxrateRow]( + s"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + values (?::int4,?::int4,?::int2,?::numeric,?::varchar,?::uuid,?::timestamp) + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate"::text""" + )(using SalestaxrateRow.write) + .updateManyWithGeneratedKeys[SalestaxrateRow]("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalestaxrateRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalestaxrateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salestaxrate_TEMP (like sales.salestaxrate) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salestaxrate_TEMP("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalestaxrateRow.text) + res <- sql"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + select * from salestaxrate_TEMP + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salestaxrate_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala index ab6ff6bfd..2afea1308 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala @@ -105,4 +105,23 @@ class SalestaxrateRepoMock(toRow: Function1[SalestaxrateRowUnsaved, Salestaxrate unsaved } } + override def upsertBatch(unsaved: List[SalestaxrateRow]): Stream[ConnectionIO, SalestaxrateRow] = { + Stream.emits { + unsaved.map { row => + map += (row.salestaxrateid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalestaxrateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.salestaxrateid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRow.scala index 4a97ac9ac..d771751a4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRow.scala @@ -16,6 +16,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -87,4 +88,32 @@ object SalestaxrateRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalestaxrateRow] = new Write[SalestaxrateRow]( + puts = List((SalestaxrateId.put, Nullability.NoNulls), + (StateprovinceId.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salestaxrateid, x.stateprovinceid, x.taxtype, x.taxrate, x.name, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalestaxrateId.put.unsafeSetNonNullable(rs, i + 0, a.salestaxrateid) + StateprovinceId.put.unsafeSetNonNullable(rs, i + 1, a.stateprovinceid) + TypoShort.put.unsafeSetNonNullable(rs, i + 2, a.taxtype) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.taxrate) + Name.put.unsafeSetNonNullable(rs, i + 4, a.name) + TypoUUID.put.unsafeSetNonNullable(rs, i + 5, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalestaxrateId.put.unsafeUpdateNonNullable(ps, i + 0, a.salestaxrateid) + StateprovinceId.put.unsafeUpdateNonNullable(ps, i + 1, a.stateprovinceid) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 2, a.taxtype) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.taxrate) + Name.put.unsafeUpdateNonNullable(ps, i + 4, a.name) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 5, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala index 4d6976a24..ca487e230 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala @@ -30,4 +30,7 @@ trait SalesterritoryRepo { def update: UpdateBuilder[SalesterritoryFields, SalesterritoryRow] def update(row: SalesterritoryRow): ConnectionIO[Boolean] def upsert(unsaved: SalesterritoryRow): ConnectionIO[SalesterritoryRow] + def upsertBatch(unsaved: List[SalesterritoryRow]): Stream[ConnectionIO, SalesterritoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala index ca9268c0e..42dfdb685 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.countryregion.CountryregionId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -163,4 +165,45 @@ class SalesterritoryRepoImpl extends SalesterritoryRepo { returning "territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate"::text """.query(using SalesterritoryRow.read).unique } + override def upsertBatch(unsaved: List[SalesterritoryRow]): Stream[ConnectionIO, SalesterritoryRow] = { + Update[SalesterritoryRow]( + s"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + values (?::int4,?::varchar,?,?,?::numeric,?::numeric,?::numeric,?::numeric,?::uuid,?::timestamp) + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate"::text""" + )(using SalesterritoryRow.write) + .updateManyWithGeneratedKeys[SalesterritoryRow]("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesterritoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesterritory_TEMP (like sales.salesterritory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesterritory_TEMP("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesterritoryRow.text) + res <- sql"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + select * from salesterritory_TEMP + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala index f00de79f9..f595a0ea4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala @@ -105,4 +105,23 @@ class SalesterritoryRepoMock(toRow: Function1[SalesterritoryRowUnsaved, Salester unsaved } } + override def upsertBatch(unsaved: List[SalesterritoryRow]): Stream[ConnectionIO, SalesterritoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.territoryid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.territoryid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRow.scala index 773d8db4b..7190b9672 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -110,4 +111,41 @@ object SalesterritoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesterritoryRow] = new Write[SalesterritoryRow]( + puts = List((SalesterritoryId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (CountryregionId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.territoryid, x.name, x.countryregioncode, x.group, x.salesytd, x.saleslastyear, x.costytd, x.costlastyear, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesterritoryId.put.unsafeSetNonNullable(rs, i + 0, a.territoryid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + CountryregionId.put.unsafeSetNonNullable(rs, i + 2, a.countryregioncode) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.group) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 4, a.salesytd) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 5, a.saleslastyear) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 6, a.costytd) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.costlastyear) + TypoUUID.put.unsafeSetNonNullable(rs, i + 8, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 9, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesterritoryId.put.unsafeUpdateNonNullable(ps, i + 0, a.territoryid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + CountryregionId.put.unsafeUpdateNonNullable(ps, i + 2, a.countryregioncode) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.group) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.salesytd) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.saleslastyear) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.costytd) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.costlastyear) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 8, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 9, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala index 5d987ea57..cd3bc9f0a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala @@ -30,4 +30,7 @@ trait SalesterritoryhistoryRepo { def update: UpdateBuilder[SalesterritoryhistoryFields, SalesterritoryhistoryRow] def update(row: SalesterritoryhistoryRow): ConnectionIO[Boolean] def upsert(unsaved: SalesterritoryhistoryRow): ConnectionIO[SalesterritoryhistoryRow] + def upsertBatch(unsaved: List[SalesterritoryhistoryRow]): Stream[ConnectionIO, SalesterritoryhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala index acfda5788..86656429c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -143,4 +145,33 @@ class SalesterritoryhistoryRepoImpl extends SalesterritoryhistoryRepo { returning "businessentityid", "territoryid", "startdate"::text, "enddate"::text, "rowguid", "modifieddate"::text """.query(using SalesterritoryhistoryRow.read).unique } + override def upsertBatch(unsaved: List[SalesterritoryhistoryRow]): Stream[ConnectionIO, SalesterritoryhistoryRow] = { + Update[SalesterritoryhistoryRow]( + s"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + values (?::int4,?::int4,?::timestamp,?::timestamp,?::uuid,?::timestamp) + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "territoryid", "startdate"::text, "enddate"::text, "rowguid", "modifieddate"::text""" + )(using SalesterritoryhistoryRow.write) + .updateManyWithGeneratedKeys[SalesterritoryhistoryRow]("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesterritoryhistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesterritoryhistory_TEMP (like sales.salesterritoryhistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesterritoryhistory_TEMP("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesterritoryhistoryRow.text) + res <- sql"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + select * from salesterritoryhistory_TEMP + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritoryhistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala index 6d53b74a0..3ec28f0d8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala @@ -105,4 +105,23 @@ class SalesterritoryhistoryRepoMock(toRow: Function1[SalesterritoryhistoryRowUns unsaved } } + override def upsertBatch(unsaved: List[SalesterritoryhistoryRow]): Stream[ConnectionIO, SalesterritoryhistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRow.scala index d6a7f7041..8c55fcb97 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRow.scala @@ -15,6 +15,7 @@ import adventureworks.sales.salesterritory.SalesterritoryId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -82,4 +83,29 @@ object SalesterritoryhistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesterritoryhistoryRow] = new Write[SalesterritoryhistoryRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (SalesterritoryId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.territoryid, x.startdate, x.enddate, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + SalesterritoryId.put.unsafeSetNonNullable(rs, i + 1, a.territoryid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 3, a.enddate) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + SalesterritoryId.put.unsafeUpdateNonNullable(ps, i + 1, a.territoryid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 3, a.enddate) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala index baeb0de87..7da8e1b14 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala @@ -30,4 +30,7 @@ trait ShoppingcartitemRepo { def update: UpdateBuilder[ShoppingcartitemFields, ShoppingcartitemRow] def update(row: ShoppingcartitemRow): ConnectionIO[Boolean] def upsert(unsaved: ShoppingcartitemRow): ConnectionIO[ShoppingcartitemRow] + def upsertBatch(unsaved: List[ShoppingcartitemRow]): Stream[ConnectionIO, ShoppingcartitemRow] + /* NOTE: this functionality is not safe if you use 
auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ShoppingcartitemRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala index a97979b8b..9de3c57e7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala @@ -10,6 +10,7 @@ package shoppingcartitem import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -136,4 +138,37 @@ class ShoppingcartitemRepoImpl extends ShoppingcartitemRepo { returning "shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated"::text, "modifieddate"::text """.query(using ShoppingcartitemRow.read).unique } + override def upsertBatch(unsaved: List[ShoppingcartitemRow]): Stream[ConnectionIO, ShoppingcartitemRow] = { + Update[ShoppingcartitemRow]( + s"""insert into sales.shoppingcartitem("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + values (?::int4,?,?::int4,?::int4,?::timestamp,?::timestamp) + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + returning "shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated"::text, "modifieddate"::text""" + )(using ShoppingcartitemRow.write) + .updateManyWithGeneratedKeys[ShoppingcartitemRow]("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate")(unsaved)(using catsStdInstancesForList, ShoppingcartitemRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShoppingcartitemRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table shoppingcartitem_TEMP (like sales.shoppingcartitem) on commit drop".update.run + _ <- new FragmentOps(sql"""copy shoppingcartitem_TEMP("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ShoppingcartitemRow.text) + res <- sql"""insert into sales.shoppingcartitem("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + select * from shoppingcartitem_TEMP + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shoppingcartitem_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala index 836e56927..d3baca7a1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala @@ -105,4 +105,23 @@ class ShoppingcartitemRepoMock(toRow: Function1[ShoppingcartitemRowUnsaved, Shop unsaved } } + override def upsertBatch(unsaved: List[ShoppingcartitemRow]): Stream[ConnectionIO, ShoppingcartitemRow] = { + Stream.emits { + unsaved.map { row => + map += (row.shoppingcartitemid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShoppingcartitemRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.shoppingcartitemid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRow.scala index babc2abdb..e069c025a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRow.scala @@ -13,6 +13,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -79,4 +80,29 @@ object ShoppingcartitemRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ShoppingcartitemRow] = new Write[ShoppingcartitemRow]( + puts = List((ShoppingcartitemId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.shoppingcartitemid, x.shoppingcartid, x.quantity, x.productid, x.datecreated, x.modifieddate), + unsafeSet = (rs, i, a) => { + ShoppingcartitemId.put.unsafeSetNonNullable(rs, i + 0, a.shoppingcartitemid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.shoppingcartid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.quantity) + ProductId.put.unsafeSetNonNullable(rs, i + 3, a.productid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.datecreated) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ShoppingcartitemId.put.unsafeUpdateNonNullable(ps, i + 0, a.shoppingcartitemid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.shoppingcartid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.quantity) + ProductId.put.unsafeUpdateNonNullable(ps, i + 3, a.productid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.datecreated) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala index b82db537e..40135e686 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala @@ -30,4 +30,7 @@ trait SpecialofferRepo { def update: UpdateBuilder[SpecialofferFields, SpecialofferRow] def update(row: SpecialofferRow): ConnectionIO[Boolean] def upsert(unsaved: SpecialofferRow): ConnectionIO[SpecialofferRow] + def upsertBatch(unsaved: List[SpecialofferRow]): Stream[ConnectionIO, SpecialofferRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala index 581558fc8..d05cbbd80 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala @@ -10,6 +10,7 @@ package specialoffer import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -159,4 +161,47 @@ class SpecialofferRepoImpl extends SpecialofferRepo { returning "specialofferid", "description", "discountpct", "type", "category", "startdate"::text, "enddate"::text, "minqty", "maxqty", "rowguid", "modifieddate"::text """.query(using SpecialofferRow.read).unique } + override def upsertBatch(unsaved: List[SpecialofferRow]): Stream[ConnectionIO, SpecialofferRow] = { + Update[SpecialofferRow]( + s"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + values (?::int4,?,?::numeric,?,?,?::timestamp,?::timestamp,?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "specialofferid", "description", "discountpct", "type", "category", "startdate"::text, "enddate"::text, "minqty", "maxqty", "rowguid", "modifieddate"::text""" + )(using SpecialofferRow.write) + .updateManyWithGeneratedKeys[SpecialofferRow]("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SpecialofferRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table specialoffer_TEMP (like sales.specialoffer) on commit drop".update.run + _ <- new FragmentOps(sql"""copy specialoffer_TEMP("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SpecialofferRow.text) + res <- sql"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + select * from specialoffer_TEMP + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialoffer_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala index 94c945431..67b03eaeb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala @@ -105,4 +105,23 @@ class SpecialofferRepoMock(toRow: Function1[SpecialofferRowUnsaved, Specialoffer unsaved } } + override def upsertBatch(unsaved: List[SpecialofferRow]): Stream[ConnectionIO, SpecialofferRow] = { + Stream.emits { + unsaved.map { row => + map += (row.specialofferid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.specialofferid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRow.scala index 9a07cb576..11ce559d4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -112,4 +113,44 @@ object SpecialofferRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SpecialofferRow] = new Write[SpecialofferRow]( + puts = List((SpecialofferId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.specialofferid, x.description, x.discountpct, x.`type`, x.category, x.startdate, x.enddate, x.minqty, x.maxqty, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SpecialofferId.put.unsafeSetNonNullable(rs, i + 0, a.specialofferid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.description) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.discountpct) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.`type`) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 4, a.category) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.startdate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.enddate) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 7, a.minqty) + Meta.IntMeta.put.unsafeSetNullable(rs, i + 8, a.maxqty) + TypoUUID.put.unsafeSetNonNullable(rs, i + 9, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 10, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SpecialofferId.put.unsafeUpdateNonNullable(ps, i + 0, a.specialofferid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.description) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.discountpct) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.`type`) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.category) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.enddate) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.minqty) + Meta.IntMeta.put.unsafeUpdateNullable(ps, i + 8, a.maxqty) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 9, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 10, a.modifieddate) + } + ) } diff --git 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala index ee69f3980..6d6773e0a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala @@ -30,4 +30,7 @@ trait SpecialofferproductRepo { def update: UpdateBuilder[SpecialofferproductFields, SpecialofferproductRow] def update(row: SpecialofferproductRow): ConnectionIO[Boolean] def upsert(unsaved: SpecialofferproductRow): ConnectionIO[SpecialofferproductRow] + def upsertBatch(unsaved: List[SpecialofferproductRow]): Stream[ConnectionIO, SpecialofferproductRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferproductRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala index d2f8afaca..4282ff5b4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.production.product.ProductId import adventureworks.sales.specialoffer.SpecialofferId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -135,4 +137,31 @@ class SpecialofferproductRepoImpl extends SpecialofferproductRepo { returning "specialofferid", "productid", "rowguid", "modifieddate"::text """.query(using SpecialofferproductRow.read).unique } + override def upsertBatch(unsaved: List[SpecialofferproductRow]): Stream[ConnectionIO, SpecialofferproductRow] = { + Update[SpecialofferproductRow]( + s"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", "modifieddate") + values (?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "specialofferid", "productid", "rowguid", "modifieddate"::text""" + )(using SpecialofferproductRow.write) + .updateManyWithGeneratedKeys[SpecialofferproductRow]("specialofferid", "productid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SpecialofferproductRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferproductRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table specialofferproduct_TEMP (like sales.specialofferproduct) on commit drop".update.run + _ <- new FragmentOps(sql"""copy specialofferproduct_TEMP("specialofferid", "productid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SpecialofferproductRow.text) + res <- sql"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", "modifieddate") + select * from specialofferproduct_TEMP + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialofferproduct_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala index 80f3069eb..2f672a6b1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala @@ -105,4 +105,23 @@ class SpecialofferproductRepoMock(toRow: Function1[SpecialofferproductRowUnsaved unsaved } } + override def upsertBatch(unsaved: List[SpecialofferproductRow]): Stream[ConnectionIO, SpecialofferproductRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferproductRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRow.scala index e4a8cdebe..8e85aacfc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRow.scala @@ -15,6 +15,7 @@ import adventureworks.sales.specialoffer.SpecialofferId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -68,4 +69,23 @@ object SpecialofferproductRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SpecialofferproductRow] = new Write[SpecialofferproductRow]( + puts = List((SpecialofferId.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.specialofferid, x.productid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SpecialofferId.put.unsafeSetNonNullable(rs, i + 0, a.specialofferid) + ProductId.put.unsafeSetNonNullable(rs, i + 1, a.productid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SpecialofferId.put.unsafeUpdateNonNullable(ps, i + 0, a.specialofferid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala index 756b16f64..0a877126a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala @@ -31,4 +31,7 @@ trait StoreRepo { def update: UpdateBuilder[StoreFields, StoreRow] def update(row: StoreRow): ConnectionIO[Boolean] def upsert(unsaved: StoreRow): ConnectionIO[StoreRow] + def upsertBatch(unsaved: List[StoreRow]): Stream[ConnectionIO, StoreRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, StoreRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala index 6f22544a2..10e3e43b4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala @@ -13,12 +13,14 @@ import adventureworks.customtypes.TypoUUID import adventureworks.customtypes.TypoXml import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,37 @@ class StoreRepoImpl extends StoreRepo { returning "businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate"::text """.query(using StoreRow.read).unique } + override def upsertBatch(unsaved: List[StoreRow]): Stream[ConnectionIO, StoreRow] = { + Update[StoreRow]( + s"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::int4,?::xml,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate"::text""" + )(using StoreRow.write) + .updateManyWithGeneratedKeys[StoreRow]("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, StoreRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, StoreRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table store_TEMP (like sales.store) on commit drop".update.run + _ <- new FragmentOps(sql"""copy store_TEMP("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using StoreRow.text) + res <- sql"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + select * from store_TEMP + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table store_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala index 4ab86e04d..49aed4f1f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala @@ -106,4 +106,23 @@ class StoreRepoMock(toRow: Function1[StoreRowUnsaved, StoreRow], unsaved } } + override def upsertBatch(unsaved: List[StoreRow]): Stream[ConnectionIO, StoreRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, StoreRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRow.scala index 394479923..bd34ceac9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRow.scala @@ -16,6 +16,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -78,4 +79,29 @@ object StoreRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[StoreRow] = new Write[StoreRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.name, x.salespersonid, x.demographics, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + BusinessentityId.put.unsafeSetNullable(rs, i + 2, a.salespersonid) + TypoXml.put.unsafeSetNullable(rs, i + 3, a.demographics) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + 
TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 2, a.salespersonid) + TypoXml.put.unsafeUpdateNullable(ps, i + 3, a.demographics) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/src/scala/adventureworks/production/product/RepoTest.scala b/typo-tester-doobie/src/scala/adventureworks/production/product/RepoTest.scala new file mode 100644 index 000000000..cfb570dfe --- /dev/null +++ b/typo-tester-doobie/src/scala/adventureworks/production/product/RepoTest.scala @@ -0,0 +1,45 @@ +package adventureworks.production.product + +import adventureworks.customtypes.* +import adventureworks.production.unitmeasure.* +import adventureworks.public.Name +import adventureworks.{SnapshotTest, withConnection} +import org.scalatest.Assertion +import doobie.free.connection.delay + +class RepoTest extends SnapshotTest { + def upsertStreaming(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + for { + _ <- unitmeasureRepo.upsertStreaming(fs2.Stream(um1, um2)) + _ <- unitmeasureRepo.selectAll.compile.toList.map(all => assert(List(um1, um2) == all.sortBy(_.name))) + um1a = um1.copy(name = Name("name1a")) + um2a = um2.copy(name = Name("name2a")) + _ <- unitmeasureRepo.upsertStreaming(fs2.Stream(um1a, um2a)) + all <- unitmeasureRepo.selectAll.compile.toList + } yield assert(List(um1a, um2a) == all.sortBy(_.name)) + } + + def upsertBatch(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + for { + initial <- unitmeasureRepo.upsertBatch(List(um1, um2)).compile.toList + _ <- delay(assert(List(um1, um2) == initial.sortBy(_.name))) + um1a = um1.copy(name = Name("name1a")) + um2a = um2.copy(name = Name("name2a")) + returned <- unitmeasureRepo.upsertBatch(List(um1a, um2a)).compile.toList + _ <- delay(assert(List(um1a, um2a) == returned.sortBy(_.name))) + all <- unitmeasureRepo.selectAll.compile.toList + } yield assert(List(um1a, um2a) == all.sortBy(_.name)) + } + + test("upsertStreaming in-memory")(upsertStreaming(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now)))) + test("upsertStreaming pg")(upsertStreaming(new UnitmeasureRepoImpl)) + + test("upsertBatch in-memory")(upsertBatch(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now)))) + test("upsertBatch pg")(upsertBatch(new UnitmeasureRepoImpl)) +} diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala index df273e514..a4ecb69a8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala +++ 
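The streaming upserts generated above all follow the same shape: create a `_TEMP` table with `on commit drop`, COPY the stream into it, then merge it into the target table with `insert ... on conflict do update` and drop the temp table. That is why each one carries the NOTE about auto-commit: the three statements only behave correctly when they share a single transaction. A minimal sketch of invoking one of them through a doobie `Transactor` (the transactor and the input stream are placeholders assumed to be configured elsewhere; the repo choice is arbitrary):

```scala
import adventureworks.sales.salesterritory.{SalesterritoryRepoImpl, SalesterritoryRow}
import cats.effect.IO
import doobie.Transactor
import doobie.implicits.*
import fs2.Stream

object UpsertStreamingSketch {
  // Sketch only: `xa` is assumed to point at a database containing the generated
  // adventureworks schema; `rows` is whatever stream of rows you want to upsert.
  def run(xa: Transactor[IO], rows: Stream[doobie.ConnectionIO, SalesterritoryRow]): IO[Int] =
    // Transactor's default strategy disables auto-commit and commits once at the end,
    // so the temp table created with `on commit drop`, the COPY and the final merge
    // all execute inside one transaction, as the generated NOTE requires.
    new SalesterritoryRepoImpl().upsertStreaming(rows, batchSize = 10000).transact(xa)
}
```

`upsertBatch`, by contrast, is a single batched `insert ... on conflict` statement driven by `updateManyWithGeneratedKeys`, so it does not carry the auto-commit caveat; the new `RepoTest.scala` above exercises both methods against `UnitmeasureRepo`, once in memory and once against Postgres.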
b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala @@ -32,4 +32,7 @@ trait DepartmentRepo { def update: UpdateBuilder[DepartmentFields, DepartmentRow] def update(row: DepartmentRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: DepartmentRow): ZIO[ZConnection, Throwable, UpdateResult[DepartmentRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DepartmentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala index 58d12700a..e675c78c1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala @@ -115,4 +115,19 @@ class DepartmentRepoImpl extends DepartmentRepo { "modifieddate" = EXCLUDED."modifieddate" returning "departmentid", "name", "groupname", "modifieddate"::text""".insertReturning(using DepartmentRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DepartmentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table department_TEMP (like humanresources.department) on commit drop".execute + val copied = streamingInsert(s"""copy department_TEMP("departmentid", "name", "groupname", "modifieddate") from stdin""", batchSize, unsaved)(DepartmentRow.text) + val merged = sql"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + select * from department_TEMP + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table department_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala index 6cae26593..023e5870d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala @@ -104,4 +104,13 @@ class DepartmentRepoMock(toRow: Function1[DepartmentRowUnsaved, DepartmentRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DepartmentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.departmentid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala index 10c985d53..d835749ec 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala @@ -33,4 +33,7 @@ trait EmployeeRepo { def update: UpdateBuilder[EmployeeFields, EmployeeRow] def update(row: EmployeeRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: EmployeeRow): ZIO[ZConnection, Throwable, UpdateResult[EmployeeRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala index c89d7b34f..cf8250fbd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala @@ -179,4 +179,30 @@ class EmployeeRepoImpl extends EmployeeRepo { "organizationnode" = EXCLUDED."organizationnode" returning "businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate"::text, "maritalstatus", "gender", "hiredate"::text, "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate"::text, "organizationnode"""".insertReturning(using EmployeeRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table employee_TEMP (like humanresources.employee) on commit drop".execute + val copied = streamingInsert(s"""copy employee_TEMP("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") from stdin""", batchSize, unsaved)(EmployeeRow.text) + val merged = sql"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + select * from employee_TEMP + on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + ; + drop table employee_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala index 2ba6e9ec1..b03bd1df6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala @@ -105,4 +105,13 @@ class EmployeeRepoMock(toRow: Function1[EmployeeRowUnsaved, EmployeeRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala index 11c6bd2c2..21ebdef46 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala @@ -32,4 +32,7 @@ trait EmployeedepartmenthistoryRepo { def update: UpdateBuilder[EmployeedepartmenthistoryFields, EmployeedepartmenthistoryRow] def update(row: EmployeedepartmenthistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: EmployeedepartmenthistoryRow): ZIO[ZConnection, Throwable, UpdateResult[EmployeedepartmenthistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala index 1d659a7a7..306315621 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala @@ -136,4 +136,18 @@ class EmployeedepartmenthistoryRepoImpl extends EmployeedepartmenthistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "departmentid", "shiftid", "startdate"::text, "enddate"::text, "modifieddate"::text""".insertReturning(using EmployeedepartmenthistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table employeedepartmenthistory_TEMP (like humanresources.employeedepartmenthistory) on commit drop".execute + val copied = streamingInsert(s"""copy employeedepartmenthistory_TEMP("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") from stdin""", batchSize, unsaved)(EmployeedepartmenthistoryRow.text) + val merged = sql"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + select * from employeedepartmenthistory_TEMP + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeedepartmenthistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala index 22d457372..8c55f8423 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala @@ -104,4 +104,13 @@ class EmployeedepartmenthistoryRepoMock(toRow: Function1[Employeedepartmenthisto UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala index a2ebfdd44..35ed6d550 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala @@ -32,4 +32,7 @@ trait EmployeepayhistoryRepo { def update: UpdateBuilder[EmployeepayhistoryFields, EmployeepayhistoryRow] def update(row: EmployeepayhistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: EmployeepayhistoryRow): ZIO[ZConnection, Throwable, UpdateResult[EmployeepayhistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeepayhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala index aa095755e..fc44c01f7 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala @@ -130,4 +130,19 @@ class EmployeepayhistoryRepoImpl extends EmployeepayhistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "ratechangedate"::text, "rate", "payfrequency", "modifieddate"::text""".insertReturning(using EmployeepayhistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeepayhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table employeepayhistory_TEMP (like humanresources.employeepayhistory) on commit drop".execute + val copied = streamingInsert(s"""copy employeepayhistory_TEMP("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") from stdin""", batchSize, unsaved)(EmployeepayhistoryRow.text) + val merged = sql"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + select * from employeepayhistory_TEMP + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeepayhistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala index beb692a78..b11f1600c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala @@ -104,4 +104,13 @@ class EmployeepayhistoryRepoMock(toRow: Function1[EmployeepayhistoryRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeepayhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala index d5e2d8f49..3100526f1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala @@ -32,4 +32,7 @@ trait JobcandidateRepo { def update: UpdateBuilder[JobcandidateFields, JobcandidateRow] def update(row: JobcandidateRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: JobcandidateRow): ZIO[ZConnection, Throwable, UpdateResult[JobcandidateRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, JobcandidateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala index df014bfa1..9b13ee47f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala @@ -117,4 +117,19 @@ class JobcandidateRepoImpl extends JobcandidateRepo { "modifieddate" = EXCLUDED."modifieddate" returning "jobcandidateid", "businessentityid", "resume", "modifieddate"::text""".insertReturning(using JobcandidateRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, JobcandidateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table jobcandidate_TEMP (like humanresources.jobcandidate) on commit drop".execute + val copied = streamingInsert(s"""copy jobcandidate_TEMP("jobcandidateid", "businessentityid", "resume", "modifieddate") from stdin""", batchSize, unsaved)(JobcandidateRow.text) + val merged = sql"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + select * from jobcandidate_TEMP + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table jobcandidate_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala index a4bd10df6..ace294072 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala @@ -104,4 +104,13 @@ class JobcandidateRepoMock(toRow: Function1[JobcandidateRowUnsaved, Jobcandidate UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, JobcandidateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.jobcandidateid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala index 466ee9766..067e3a94f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala @@ -32,4 +32,7 @@ trait ShiftRepo { def update: UpdateBuilder[ShiftFields, ShiftRow] def update(row: ShiftRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ShiftRow): ZIO[ZConnection, Throwable, UpdateResult[ShiftRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShiftRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala index b96732646..e94722452 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala @@ -120,4 +120,20 @@ class ShiftRepoImpl extends ShiftRepo { "modifieddate" = EXCLUDED."modifieddate" returning "shiftid", "name", "starttime"::text, "endtime"::text, "modifieddate"::text""".insertReturning(using ShiftRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShiftRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table shift_TEMP (like humanresources.shift) on commit drop".execute + val copied = streamingInsert(s"""copy shift_TEMP("shiftid", "name", "starttime", "endtime", "modifieddate") from stdin""", batchSize, unsaved)(ShiftRow.text) + val merged = sql"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + select * from shift_TEMP + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shift_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala index 254878ca3..2bf4af7fb 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala @@ -104,4 +104,13 @@ class ShiftRepoMock(toRow: Function1[ShiftRowUnsaved, ShiftRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShiftRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.shiftid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala new file mode 100644 index 000000000..41146ea38 --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.cardinal_number` + * Constraint: CHECK ((VALUE >= 0)) + */ +case class CardinalNumber(value: Int) +object CardinalNumber { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[CardinalNumber]] = adventureworks.IntArrayDecoder.map(_.map(CardinalNumber.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[CardinalNumber]] = adventureworks.IntArrayEncoder.contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[CardinalNumber]] = adventureworks.IntArraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CardinalNumber, Int] = Bijection[CardinalNumber, Int](_.value)(CardinalNumber.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[CardinalNumber] = JdbcDecoder.intDecoder.map(CardinalNumber.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[CardinalNumber] = JdbcEncoder.intEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[CardinalNumber] = JsonDecoder.int.map(CardinalNumber.apply) + implicit lazy val jsonEncoder: JsonEncoder[CardinalNumber] = JsonEncoder.int.contramap(_.value) + implicit lazy val ordering: Ordering[CardinalNumber] = Ordering.by(_.value) + implicit lazy val pgType: PGType[CardinalNumber] = PGType.instance(""""information_schema"."cardinal_number"""", Types.OTHER) + implicit lazy val setter: Setter[CardinalNumber] = Setter.intSetter.contramap(_.value) + implicit lazy val text: Text[CardinalNumber] = new Text[CardinalNumber] { + override def unsafeEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala new file mode 100644 index 000000000..f067aa4bd --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
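The Text instance generated for CardinalNumber above appears to be the same kind of machinery the repo implementations hand to streamingInsert for the COPY ... from stdin step: it renders a value into the textual form sent to Postgres. A tiny sketch, not generated code, exercising it directly; for a plain int wrapper the buffer is expected to end up holding just the digits:

import adventureworks.information_schema.CardinalNumber

object TextEncodeSketch {
  def main(args: Array[String]): Unit = {
    val sb = new StringBuilder
    // delegates to Text.intInstance, so the expected output is "7"
    CardinalNumber.text.unsafeEncode(CardinalNumber(7), sb)
    println(sb.toString)
  }
}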
+ */ +package adventureworks +package information_schema + +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.character_data` + * No constraint + */ +case class CharacterData(value: String) +object CharacterData { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[CharacterData]] = adventureworks.StringArrayDecoder.map(_.map(CharacterData.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[CharacterData]] = adventureworks.StringArrayEncoder.contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[CharacterData]] = adventureworks.StringArraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CharacterData, String] = Bijection[CharacterData, String](_.value)(CharacterData.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[CharacterData] = JdbcDecoder.stringDecoder.map(CharacterData.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[CharacterData] = JdbcEncoder.stringEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[CharacterData] = JsonDecoder.string.map(CharacterData.apply) + implicit lazy val jsonEncoder: JsonEncoder[CharacterData] = JsonEncoder.string.contramap(_.value) + implicit lazy val ordering: Ordering[CharacterData] = Ordering.by(_.value) + implicit lazy val pgType: PGType[CharacterData] = PGType.instance(""""information_schema"."character_data"""", Types.OTHER) + implicit lazy val setter: Setter[CharacterData] = Setter.stringSetter.contramap(_.value) + implicit lazy val text: Text[CharacterData] = new Text[CharacterData] { + override def unsafeEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala new file mode 100644 index 000000000..652adfa15 --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.sql_identifier` + * No constraint + */ +case class SqlIdentifier(value: String) +object SqlIdentifier { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[SqlIdentifier]] = adventureworks.StringArrayDecoder.map(_.map(SqlIdentifier.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[SqlIdentifier]] = adventureworks.StringArrayEncoder.contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[SqlIdentifier]] = adventureworks.StringArraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[SqlIdentifier, String] = Bijection[SqlIdentifier, String](_.value)(SqlIdentifier.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[SqlIdentifier] = JdbcDecoder.stringDecoder.map(SqlIdentifier.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[SqlIdentifier] = JdbcEncoder.stringEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[SqlIdentifier] = JsonDecoder.string.map(SqlIdentifier.apply) + implicit lazy val jsonEncoder: JsonEncoder[SqlIdentifier] = JsonEncoder.string.contramap(_.value) + implicit lazy val ordering: Ordering[SqlIdentifier] = Ordering.by(_.value) + implicit lazy val pgType: PGType[SqlIdentifier] = PGType.instance(""""information_schema"."sql_identifier"""", Types.OTHER) + implicit lazy val setter: Setter[SqlIdentifier] = Setter.stringSetter.contramap(_.value) + implicit lazy val text: Text[SqlIdentifier] = new Text[SqlIdentifier] { + override def unsafeEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala new file mode 100644 index 000000000..0a53c4252 --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala @@ -0,0 +1,39 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import adventureworks.customtypes.TypoInstant +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.time_stamp` + * No constraint + */ +case class TimeStamp(value: TypoInstant) +object TimeStamp { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[TimeStamp]] = JdbcDecoder[Array[TypoInstant]].map(_.map(TimeStamp.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[TimeStamp]] = JdbcEncoder[Array[TypoInstant]].contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[TimeStamp]] = TypoInstant.arraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[TimeStamp, TypoInstant] = Bijection[TimeStamp, TypoInstant](_.value)(TimeStamp.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[TimeStamp] = TypoInstant.jdbcDecoder.map(TimeStamp.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[TimeStamp] = TypoInstant.jdbcEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[TimeStamp] = TypoInstant.jsonDecoder.map(TimeStamp.apply) + implicit lazy val jsonEncoder: JsonEncoder[TimeStamp] = TypoInstant.jsonEncoder.contramap(_.value) + implicit def ordering(implicit O0: Ordering[TypoInstant]): Ordering[TimeStamp] = Ordering.by(_.value) + implicit lazy val pgType: PGType[TimeStamp] = PGType.instance(""""information_schema"."time_stamp"""", Types.OTHER) + implicit lazy val setter: Setter[TimeStamp] = TypoInstant.setter.contramap(_.value) + implicit lazy val text: Text[TimeStamp] = new Text[TimeStamp] { + override def unsafeEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala new file mode 100644 index 000000000..97578ef7a --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.yes_or_no` + * Constraint: CHECK (((VALUE)::text = ANY ((ARRAY['YES'::character varying, 'NO'::character varying])::text[]))) + */ +case class YesOrNo(value: String) +object YesOrNo { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[YesOrNo]] = adventureworks.StringArrayDecoder.map(_.map(YesOrNo.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[YesOrNo]] = adventureworks.StringArrayEncoder.contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[YesOrNo]] = adventureworks.StringArraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[YesOrNo, String] = Bijection[YesOrNo, String](_.value)(YesOrNo.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[YesOrNo] = JdbcDecoder.stringDecoder.map(YesOrNo.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[YesOrNo] = JdbcEncoder.stringEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[YesOrNo] = JsonDecoder.string.map(YesOrNo.apply) + implicit lazy val jsonEncoder: JsonEncoder[YesOrNo] = JsonEncoder.string.contramap(_.value) + implicit lazy val ordering: Ordering[YesOrNo] = Ordering.by(_.value) + implicit lazy val pgType: PGType[YesOrNo] = PGType.instance(""""information_schema"."yes_or_no"""", Types.OTHER) + implicit lazy val setter: Setter[YesOrNo] = Setter.stringSetter.contramap(_.value) + implicit lazy val text: Text[YesOrNo] = new Text[YesOrNo] { + override def unsafeEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala index 21e989706..1a973d39b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala @@ -32,4 +32,7 @@ trait AddressRepo { def update: UpdateBuilder[AddressFields, AddressRow] def update(row: AddressRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: AddressRow): ZIO[ZConnection, Throwable, UpdateResult[AddressRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
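Stepping back to the five information_schema domain wrappers just above (CardinalNumber, CharacterData, SqlIdentifier, TimeStamp, YesOrNo): they all serialize as their bare underlying JSON value, and their zio-json decoders do not re-check the Postgres CHECK constraints quoted in their scaladoc, so only the database enforces those. A short sketch, not generated code, illustrating both points:

import zio.json._
import adventureworks.information_schema.{CardinalNumber, YesOrNo}

object DomainJsonSketch {
  def main(args: Array[String]): Unit = {
    // encodes as a bare number, because jsonEncoder is JsonEncoder.int.contramap(_.value)
    println(CardinalNumber(42).toJson)      // 42
    // decodes from a bare JSON string; the YES/NO CHECK constraint is enforced only by Postgres
    println("\"MAYBE\"".fromJson[YesOrNo])  // Right(YesOrNo(MAYBE))
    println("\"YES\"".fromJson[YesOrNo])    // Right(YesOrNo(YES))
  }
}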
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala index 100b2fa2a..e7db77f62 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala @@ -141,4 +141,24 @@ class AddressRepoImpl extends AddressRepo { "modifieddate" = EXCLUDED."modifieddate" returning "addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate"::text""".insertReturning(using AddressRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table address_TEMP (like person.address) on commit drop".execute + val copied = streamingInsert(s"""copy address_TEMP("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(AddressRow.text) + val merged = sql"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + select * from address_TEMP + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table address_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala index ea74ea3f2..f1306654e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala @@ -104,4 +104,13 @@ class AddressRepoMock(toRow: Function1[AddressRowUnsaved, AddressRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.addressid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala index 970a1ee96..a3f10a481 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala @@ -32,4 +32,7 @@ trait AddresstypeRepo { def update: UpdateBuilder[AddresstypeFields, AddresstypeRow] def update(row: AddresstypeRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: AddresstypeRow): ZIO[ZConnection, Throwable, UpdateResult[AddresstypeRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddresstypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala index 6be1ed7a7..bf63e3d7a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala @@ -119,4 +119,19 @@ class AddresstypeRepoImpl extends AddresstypeRepo { "modifieddate" = EXCLUDED."modifieddate" returning "addresstypeid", "name", "rowguid", "modifieddate"::text""".insertReturning(using AddresstypeRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddresstypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table addresstype_TEMP (like person.addresstype) on commit drop".execute + val copied = streamingInsert(s"""copy addresstype_TEMP("addresstypeid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(AddresstypeRow.text) + val merged = sql"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + select * from addresstype_TEMP + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table addresstype_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala index 3d136ee01..2e98a7a64 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala @@ -104,4 +104,13 @@ class AddresstypeRepoMock(toRow: Function1[AddresstypeRowUnsaved, AddresstypeRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddresstypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.addresstypeid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala index 8d5723e57..39fec6772 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala @@ -32,4 +32,7 @@ trait BusinessentityRepo { def update: UpdateBuilder[BusinessentityFields, BusinessentityRow] def update(row: BusinessentityRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: BusinessentityRow): ZIO[ZConnection, Throwable, UpdateResult[BusinessentityRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala index a909211f5..d4dba791a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala @@ -114,4 +114,18 @@ class BusinessentityRepoImpl extends BusinessentityRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "rowguid", "modifieddate"::text""".insertReturning(using BusinessentityRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table businessentity_TEMP (like person.businessentity) on commit drop".execute + val copied = streamingInsert(s"""copy businessentity_TEMP("businessentityid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentityRow.text) + val merged = sql"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + select * from businessentity_TEMP + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentity_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala index a67ee9642..d9d4f70ca 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala @@ -104,4 +104,13 @@ class BusinessentityRepoMock(toRow: Function1[BusinessentityRowUnsaved, Business UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala index de1c702ff..60d62dbf1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala @@ -32,4 +32,7 @@ trait BusinessentityaddressRepo { def update: UpdateBuilder[BusinessentityaddressFields, BusinessentityaddressRow] def update(row: BusinessentityaddressRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: BusinessentityaddressRow): ZIO[ZConnection, Throwable, UpdateResult[BusinessentityaddressRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala index 9d54f614f..b4dd567bd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala @@ -134,4 +134,18 @@ class BusinessentityaddressRepoImpl extends BusinessentityaddressRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate"::text""".insertReturning(using BusinessentityaddressRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table businessentityaddress_TEMP (like person.businessentityaddress) on commit drop".execute + val copied = streamingInsert(s"""copy businessentityaddress_TEMP("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentityaddressRow.text) + val merged = sql"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + select * from businessentityaddress_TEMP + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentityaddress_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala index 63abb1c79..05fef776c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala @@ -104,4 +104,13 @@ class BusinessentityaddressRepoMock(toRow: Function1[BusinessentityaddressRowUns UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala index c4cef1253..b0c2013a9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala @@ -32,4 +32,7 @@ trait BusinessentitycontactRepo { def update: UpdateBuilder[BusinessentitycontactFields, BusinessentitycontactRow] def update(row: BusinessentitycontactRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: BusinessentitycontactRow): ZIO[ZConnection, Throwable, UpdateResult[BusinessentitycontactRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentitycontactRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala index e74810ffe..a14c7a407 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala @@ -133,4 +133,18 @@ class BusinessentitycontactRepoImpl extends BusinessentitycontactRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate"::text""".insertReturning(using BusinessentitycontactRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentitycontactRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table businessentitycontact_TEMP (like person.businessentitycontact) on commit drop".execute + val copied = streamingInsert(s"""copy businessentitycontact_TEMP("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentitycontactRow.text) + val merged = sql"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + select * from businessentitycontact_TEMP + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentitycontact_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala index cee9eed9a..f22f3b46e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala @@ -104,4 +104,13 @@ class BusinessentitycontactRepoMock(toRow: Function1[BusinessentitycontactRowUns UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentitycontactRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala index 6cefc8487..fc4ea7a35 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala @@ -32,4 +32,7 @@ trait ContacttypeRepo { def update: UpdateBuilder[ContacttypeFields, ContacttypeRow] def update(row: ContacttypeRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ContacttypeRow): ZIO[ZConnection, Throwable, UpdateResult[ContacttypeRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ContacttypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala index a8d9879bc..fa062b131 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala @@ -111,4 +111,18 @@ class ContacttypeRepoImpl extends ContacttypeRepo { "modifieddate" = EXCLUDED."modifieddate" returning "contacttypeid", "name", "modifieddate"::text""".insertReturning(using ContacttypeRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ContacttypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table contacttype_TEMP (like person.contacttype) on commit drop".execute + val copied = streamingInsert(s"""copy contacttype_TEMP("contacttypeid", "name", "modifieddate") from stdin""", batchSize, unsaved)(ContacttypeRow.text) + val merged = sql"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + select * from contacttype_TEMP + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table contacttype_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala index f440033b1..5c02e6a97 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala @@ -104,4 +104,13 @@ class ContacttypeRepoMock(toRow: Function1[ContacttypeRowUnsaved, ContacttypeRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ContacttypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.contacttypeid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala index ba9ccc582..ad3bb8eb3 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala @@ -32,4 +32,7 @@ trait CountryregionRepo { def update: UpdateBuilder[CountryregionFields, CountryregionRow] def update(row: CountryregionRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CountryregionRow): ZIO[ZConnection, Throwable, UpdateResult[CountryregionRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala index 81f25e3a9..d03ac9a2e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala @@ -108,4 +108,18 @@ class CountryregionRepoImpl extends CountryregionRepo { "modifieddate" = EXCLUDED."modifieddate" returning "countryregioncode", "name", "modifieddate"::text""".insertReturning(using CountryregionRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table countryregion_TEMP (like person.countryregion) on commit drop".execute + val copied = streamingInsert(s"""copy countryregion_TEMP("countryregioncode", "name", "modifieddate") from stdin""", batchSize, unsaved)(CountryregionRow.text) + val merged = sql"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + select * from countryregion_TEMP + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregion_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala index cdc5ee6f9..bce0c417c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala @@ -104,4 +104,13 @@ class CountryregionRepoMock(toRow: Function1[CountryregionRowUnsaved, Countryreg UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.countryregioncode -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala index f25412f96..e6b7b53ed 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala @@ -32,4 +32,7 @@ trait EmailaddressRepo { def update: UpdateBuilder[EmailaddressFields, EmailaddressRow] def update(row: EmailaddressRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: EmailaddressRow): ZIO[ZConnection, Throwable, UpdateResult[EmailaddressRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmailaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala index 7bf819ab3..d06be6ccc 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala @@ -136,4 +136,19 @@ class EmailaddressRepoImpl extends EmailaddressRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate"::text""".insertReturning(using EmailaddressRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmailaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table emailaddress_TEMP (like person.emailaddress) on commit drop".execute + val copied = streamingInsert(s"""copy emailaddress_TEMP("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(EmailaddressRow.text) + val merged = sql"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + select * from emailaddress_TEMP + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table emailaddress_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala index 41445281d..4b3f9bc47 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala @@ -104,4 +104,13 @@ class EmailaddressRepoMock(toRow: Function1[EmailaddressRowUnsaved, Emailaddress UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmailaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala index 447152fff..e65fa0690 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala @@ -33,4 +33,7 @@ trait PasswordRepo { def update: UpdateBuilder[PasswordFields, PasswordRow] def update(row: PasswordRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PasswordRow): ZIO[ZConnection, Throwable, UpdateResult[PasswordRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PasswordRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala index aaeaf9d06..f9acc195d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala @@ -121,4 +121,20 @@ class PasswordRepoImpl extends PasswordRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate"::text""".insertReturning(using PasswordRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PasswordRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table password_TEMP (like person.password) on commit drop".execute + val copied = streamingInsert(s"""copy password_TEMP("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(PasswordRow.text) + val merged = sql"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + select * from password_TEMP + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table password_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala index 46915294c..a6ba35fd5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala @@ -105,4 +105,13 @@ class PasswordRepoMock(toRow: Function1[PasswordRowUnsaved, PasswordRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PasswordRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala index e431bc1f1..5d880115a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala @@ -33,4 +33,7 @@ trait PersonRepo { def update: UpdateBuilder[PersonFields, PersonRow] def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala index 1b594c84d..7f73c6e2e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala @@ -163,4 +163,28 @@ class PersonRepoImpl extends PersonRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate"::text""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like person.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + select * from person_TEMP + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala index 5a3c9cec2..2eeb49c51 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala @@ -105,4 +105,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala index bc04cd72a..650a141fd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala @@ -32,4 +32,7 @@ trait PersonphoneRepo { def update: UpdateBuilder[PersonphoneFields, PersonphoneRow] def update(row: PersonphoneRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonphoneRow): ZIO[ZConnection, Throwable, UpdateResult[PersonphoneRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonphoneRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala index 8abf56aad..827112f15 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala @@ -126,4 +126,17 @@ class PersonphoneRepoImpl extends PersonphoneRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate"::text""".insertReturning(using PersonphoneRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonphoneRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table personphone_TEMP (like person.personphone) on commit drop".execute + val copied = streamingInsert(s"""copy personphone_TEMP("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") from stdin""", batchSize, unsaved)(PersonphoneRow.text) + val merged = sql"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + select * from personphone_TEMP + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personphone_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala index fccc49d19..17c15b430 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala @@ -104,4 +104,13 @@ class PersonphoneRepoMock(toRow: Function1[PersonphoneRowUnsaved, PersonphoneRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonphoneRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala index 426ed087e..2710eb9a9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala @@ -32,4 +32,7 @@ trait PhonenumbertypeRepo { def update: UpdateBuilder[PhonenumbertypeFields, PhonenumbertypeRow] def update(row: PhonenumbertypeRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PhonenumbertypeRow): ZIO[ZConnection, Throwable, UpdateResult[PhonenumbertypeRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PhonenumbertypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala index 63efd0a14..e6aa4827c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala @@ -111,4 +111,18 @@ class PhonenumbertypeRepoImpl extends PhonenumbertypeRepo { "modifieddate" = EXCLUDED."modifieddate" returning "phonenumbertypeid", "name", "modifieddate"::text""".insertReturning(using PhonenumbertypeRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PhonenumbertypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table phonenumbertype_TEMP (like person.phonenumbertype) on commit drop".execute + val copied = streamingInsert(s"""copy phonenumbertype_TEMP("phonenumbertypeid", "name", "modifieddate") from stdin""", batchSize, unsaved)(PhonenumbertypeRow.text) + val merged = sql"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + select * from phonenumbertype_TEMP + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table phonenumbertype_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala index dbe7dc7f1..2b95303c2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala @@ -104,4 +104,13 @@ class PhonenumbertypeRepoMock(toRow: Function1[PhonenumbertypeRowUnsaved, Phonen UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PhonenumbertypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.phonenumbertypeid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala index 5e3a26c69..02ef25756 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala @@ -32,4 +32,7 @@ trait StateprovinceRepo { def update: UpdateBuilder[StateprovinceFields, StateprovinceRow] def update(row: StateprovinceRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: StateprovinceRow): ZIO[ZConnection, Throwable, UpdateResult[StateprovinceRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StateprovinceRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala index fc885ba9a..0521b1601 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala @@ -142,4 +142,23 @@ class StateprovinceRepoImpl extends StateprovinceRepo { "modifieddate" = EXCLUDED."modifieddate" returning "stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate"::text""".insertReturning(using StateprovinceRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StateprovinceRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table stateprovince_TEMP (like person.stateprovince) on commit drop".execute + val copied = streamingInsert(s"""copy stateprovince_TEMP("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(StateprovinceRow.text) + val merged = sql"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + select * from stateprovince_TEMP + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table stateprovince_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala index e7d43ae06..18d3050e9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala @@ -104,4 +104,13 @@ class StateprovinceRepoMock(toRow: Function1[StateprovinceRowUnsaved, Stateprovi UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StateprovinceRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.stateprovinceid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala index 312892a6e..e94ddcbb5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala @@ -32,4 +32,7 @@ trait BillofmaterialsRepo { def update: UpdateBuilder[BillofmaterialsFields, BillofmaterialsRow] def update(row: BillofmaterialsRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: BillofmaterialsRow): ZIO[ZConnection, Throwable, UpdateResult[BillofmaterialsRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BillofmaterialsRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala index fd1c7f2c0..cb38914d8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala @@ -144,4 +144,24 @@ class BillofmaterialsRepoImpl extends BillofmaterialsRepo { "modifieddate" = EXCLUDED."modifieddate" returning "billofmaterialsid", "productassemblyid", "componentid", "startdate"::text, "enddate"::text, "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate"::text""".insertReturning(using BillofmaterialsRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BillofmaterialsRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table billofmaterials_TEMP (like production.billofmaterials) on commit drop".execute + val copied = streamingInsert(s"""copy billofmaterials_TEMP("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") from stdin""", batchSize, unsaved)(BillofmaterialsRow.text) + val merged = sql"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + select * from billofmaterials_TEMP + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table billofmaterials_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala index bd10bc5be..46e570e63 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala @@ -104,4 +104,13 @@ class BillofmaterialsRepoMock(toRow: Function1[BillofmaterialsRowUnsaved, Billof UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BillofmaterialsRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.billofmaterialsid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala index 03712c55c..40589de9a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala @@ -32,4 +32,7 @@ trait CultureRepo { def update: UpdateBuilder[CultureFields, CultureRow] def update(row: CultureRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CultureRow): ZIO[ZConnection, Throwable, UpdateResult[CultureRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala index 76057147d..a939f40dd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala @@ -108,4 +108,18 @@ class CultureRepoImpl extends CultureRepo { "modifieddate" = EXCLUDED."modifieddate" returning "cultureid", "name", "modifieddate"::text""".insertReturning(using CultureRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table culture_TEMP (like production.culture) on commit drop".execute + val copied = streamingInsert(s"""copy culture_TEMP("cultureid", "name", "modifieddate") from stdin""", batchSize, unsaved)(CultureRow.text) + val merged = sql"""insert into production.culture("cultureid", "name", "modifieddate") + select * from culture_TEMP + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table culture_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala index cdeaa55e0..1e92821f0 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala @@ -104,4 +104,13 @@ class CultureRepoMock(toRow: Function1[CultureRowUnsaved, CultureRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.cultureid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala index f99a29423..724696083 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala @@ -34,4 +34,7 @@ trait DocumentRepo { def update: UpdateBuilder[DocumentFields, DocumentRow] def update(row: DocumentRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: DocumentRow): ZIO[ZConnection, Throwable, UpdateResult[DocumentRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala index 639c1bf31..38223ddad 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala @@ -171,4 +171,28 @@ class DocumentRepoImpl extends DocumentRepo { "modifieddate" = EXCLUDED."modifieddate" returning "title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate"::text, "documentnode"""".insertReturning(using DocumentRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table document_TEMP (like production.document) on commit drop".execute + val copied = streamingInsert(s"""copy document_TEMP("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") from stdin""", batchSize, unsaved)(DocumentRow.text) + val merged = sql"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + select * from document_TEMP + on conflict ("documentnode") + do update set + "title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table document_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala index d8dbf7979..2443ab6c7 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala @@ -108,4 +108,13 @@ class DocumentRepoMock(toRow: Function1[DocumentRowUnsaved, DocumentRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.documentnode -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala index 1ff081c4c..6a9735938 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala @@ -32,4 +32,7 @@ trait IllustrationRepo { def update: UpdateBuilder[IllustrationFields, IllustrationRow] def update(row: IllustrationRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: IllustrationRow): ZIO[ZConnection, Throwable, UpdateResult[IllustrationRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IllustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala index 73c8dc570..9b4e4700b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala @@ -112,4 +112,18 @@ class IllustrationRepoImpl extends IllustrationRepo { "modifieddate" = EXCLUDED."modifieddate" returning "illustrationid", "diagram", "modifieddate"::text""".insertReturning(using IllustrationRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IllustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table illustration_TEMP (like production.illustration) on commit drop".execute + val copied = streamingInsert(s"""copy illustration_TEMP("illustrationid", "diagram", "modifieddate") from stdin""", batchSize, unsaved)(IllustrationRow.text) + val merged = sql"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + select * from illustration_TEMP + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table illustration_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala index 466b5da4c..e3499486a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala @@ -104,4 +104,13 @@ class IllustrationRepoMock(toRow: Function1[IllustrationRowUnsaved, Illustration UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IllustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.illustrationid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala index 2ba616034..161d008b6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala @@ -32,4 +32,7 @@ trait LocationRepo { def update: UpdateBuilder[LocationFields, LocationRow] def update(row: LocationRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: LocationRow): ZIO[ZConnection, Throwable, UpdateResult[LocationRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, LocationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala index 628c9d6ba..0f175d892 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala @@ -126,4 +126,20 @@ class LocationRepoImpl extends LocationRepo { "modifieddate" = EXCLUDED."modifieddate" returning "locationid", "name", "costrate", "availability", "modifieddate"::text""".insertReturning(using LocationRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, LocationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table location_TEMP (like production.location) on commit drop".execute + val copied = streamingInsert(s"""copy location_TEMP("locationid", "name", "costrate", "availability", "modifieddate") from stdin""", batchSize, unsaved)(LocationRow.text) + val merged = sql"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + select * from location_TEMP + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table location_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala index 22c1f0e7f..4f54b1a9b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala @@ -104,4 +104,13 @@ class LocationRepoMock(toRow: Function1[LocationRowUnsaved, LocationRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, LocationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.locationid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala index 0eca70366..4be415fe6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala @@ -32,4 +32,7 @@ trait ProductRepo { def update: UpdateBuilder[ProductFields, ProductRow] def update(row: ProductRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductRow): ZIO[ZConnection, Throwable, UpdateResult[ProductRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala index 72b2f4373..cbec38600 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala @@ -215,4 +215,40 @@ class ProductRepoImpl extends ProductRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate"::text, "sellenddate"::text, "discontinueddate"::text, "rowguid", "modifieddate"::text""".insertReturning(using ProductRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table product_TEMP (like production.product) on commit drop".execute + val copied = streamingInsert(s"""copy product_TEMP("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductRow.text) + val merged = sql"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") + select * from product_TEMP + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table product_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala index 20bfccfed..b3b1a494b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala @@ -104,4 +104,13 @@ class ProductRepoMock(toRow: Function1[ProductRowUnsaved, ProductRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala index 0d84445fa..ba80243ae 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala @@ -32,4 +32,7 @@ trait ProductcategoryRepo { def update: UpdateBuilder[ProductcategoryFields, ProductcategoryRow] def update(row: ProductcategoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductcategoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductcategoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala index 3b9e1baa6..a6cc30056 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala @@ -119,4 +119,19 @@ class ProductcategoryRepoImpl extends ProductcategoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productcategoryid", "name", "rowguid", "modifieddate"::text""".insertReturning(using ProductcategoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productcategory_TEMP (like production.productcategory) on commit drop".execute + val copied = streamingInsert(s"""copy productcategory_TEMP("productcategoryid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductcategoryRow.text) + val merged = sql"""insert into production.productcategory("productcategoryid", "name", "rowguid", "modifieddate") + select * from productcategory_TEMP + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcategory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala index 96c228625..351aef21d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductcategoryRepoMock(toRow: Function1[ProductcategoryRowUnsaved, Produc UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productcategoryid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala index 0bf6ecf39..0893036d5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala @@ -32,4 +32,7 @@ trait ProductcosthistoryRepo { def update: UpdateBuilder[ProductcosthistoryFields, ProductcosthistoryRow] def update(row: ProductcosthistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductcosthistoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductcosthistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcosthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala index ce95305ed..da6654b10 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala @@ -129,4 +129,19 @@ class ProductcosthistoryRepoImpl extends ProductcosthistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "startdate"::text, "enddate"::text, "standardcost", "modifieddate"::text""".insertReturning(using ProductcosthistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcosthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productcosthistory_TEMP (like production.productcosthistory) on commit drop".execute + val copied = streamingInsert(s"""copy productcosthistory_TEMP("productid", "startdate", "enddate", "standardcost", "modifieddate") from stdin""", batchSize, unsaved)(ProductcosthistoryRow.text) + val merged = sql"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + select * from productcosthistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcosthistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala index 65553c54e..94f8bbb7c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductcosthistoryRepoMock(toRow: Function1[ProductcosthistoryRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcosthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala index cfe85d2fe..c24dd3191 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala @@ -32,4 +32,7 @@ trait ProductdescriptionRepo { def update: UpdateBuilder[ProductdescriptionFields, ProductdescriptionRow] def update(row: ProductdescriptionRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductdescriptionRow): ZIO[ZConnection, Throwable, UpdateResult[ProductdescriptionRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdescriptionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala index 56b7f8af4..dd2c20318 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala @@ -119,4 +119,19 @@ class ProductdescriptionRepoImpl extends ProductdescriptionRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productdescriptionid", "description", "rowguid", "modifieddate"::text""".insertReturning(using ProductdescriptionRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdescriptionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productdescription_TEMP (like production.productdescription) on commit drop".execute + val copied = streamingInsert(s"""copy productdescription_TEMP("productdescriptionid", "description", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductdescriptionRow.text) + val merged = sql"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + select * from productdescription_TEMP + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdescription_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala index 27856e11a..fc61ae782 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala @@ -104,4 +104,13 @@ class ProductdescriptionRepoMock(toRow: Function1[ProductdescriptionRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdescriptionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productdescriptionid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala index a8d933b97..4639989de 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala @@ -32,4 +32,7 @@ trait ProductdocumentRepo { def update: UpdateBuilder[ProductdocumentFields, ProductdocumentRow] def update(row: ProductdocumentRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductdocumentRow): ZIO[ZConnection, Throwable, UpdateResult[ProductdocumentRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala index d74c08021..afd5b277f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala @@ -124,4 +124,17 @@ class ProductdocumentRepoImpl extends ProductdocumentRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "modifieddate"::text, "documentnode"""".insertReturning(using ProductdocumentRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productdocument_TEMP (like production.productdocument) on commit drop".execute + val copied = streamingInsert(s"""copy productdocument_TEMP("productid", "modifieddate", "documentnode") from stdin""", batchSize, unsaved)(ProductdocumentRow.text) + val merged = sql"""insert into production.productdocument("productid", "modifieddate", "documentnode") + select * from productdocument_TEMP + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdocument_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala index 167ca48ba..a3ec80fa4 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala @@ -104,4 +104,13 @@ class ProductdocumentRepoMock(toRow: Function1[ProductdocumentRowUnsaved, Produc UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala index 52e9c1d12..6dc98292d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala @@ -32,4 +32,7 @@ trait ProductinventoryRepo { def update: UpdateBuilder[ProductinventoryFields, ProductinventoryRow] def update(row: ProductinventoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductinventoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductinventoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductinventoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala index 682c8d6a1..a1495d1d2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala @@ -146,4 +146,21 @@ class ProductinventoryRepoImpl extends ProductinventoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate"::text""".insertReturning(using ProductinventoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductinventoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productinventory_TEMP (like production.productinventory) on commit drop".execute + val copied = streamingInsert(s"""copy productinventory_TEMP("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductinventoryRow.text) + val merged = sql"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + select * from productinventory_TEMP + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productinventory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala index 0bb6ad34b..837bcfacc 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductinventoryRepoMock(toRow: Function1[ProductinventoryRowUnsaved, Prod UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductinventoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala index 401a222fb..e2a65f035 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala @@ -32,4 +32,7 @@ trait ProductlistpricehistoryRepo { def update: UpdateBuilder[ProductlistpricehistoryFields, ProductlistpricehistoryRow] def update(row: ProductlistpricehistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductlistpricehistoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductlistpricehistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductlistpricehistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala index 789102f7a..a5ff94d93 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala @@ -129,4 +129,19 @@ class ProductlistpricehistoryRepoImpl extends ProductlistpricehistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "startdate"::text, "enddate"::text, "listprice", "modifieddate"::text""".insertReturning(using ProductlistpricehistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductlistpricehistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productlistpricehistory_TEMP (like production.productlistpricehistory) on commit drop".execute + val copied = streamingInsert(s"""copy productlistpricehistory_TEMP("productid", "startdate", "enddate", "listprice", "modifieddate") from stdin""", batchSize, unsaved)(ProductlistpricehistoryRow.text) + val merged = sql"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + select * from productlistpricehistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productlistpricehistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala index 23082bf8b..1af92512b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductlistpricehistoryRepoMock(toRow: Function1[ProductlistpricehistoryRo UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductlistpricehistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala index f83f9f199..7f7535151 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala @@ -32,4 +32,7 @@ trait ProductmodelRepo { def update: UpdateBuilder[ProductmodelFields, ProductmodelRow] def update(row: ProductmodelRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductmodelRow): ZIO[ZConnection, Throwable, UpdateResult[ProductmodelRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala index bfb8c9bb4..fe308ea5a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala @@ -129,4 +129,21 @@ class ProductmodelRepoImpl extends ProductmodelRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate"::text""".insertReturning(using ProductmodelRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productmodel_TEMP (like production.productmodel) on commit drop".execute + val copied = streamingInsert(s"""copy productmodel_TEMP("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelRow.text) + val merged = sql"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + select * from productmodel_TEMP + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodel_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala index fe929b521..4e44dac67 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala @@ -104,4 +104,13 @@ class ProductmodelRepoMock(toRow: Function1[ProductmodelRowUnsaved, Productmodel UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productmodelid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala index d02bfe0c0..957c7eb6b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala @@ -32,4 +32,7 @@ trait ProductmodelillustrationRepo { def update: UpdateBuilder[ProductmodelillustrationFields, ProductmodelillustrationRow] def update(row: ProductmodelillustrationRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductmodelillustrationRow): ZIO[ZConnection, Throwable, UpdateResult[ProductmodelillustrationRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelillustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala index 8ef3057fd..f5d4efd0a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala @@ -121,4 +121,17 @@ class ProductmodelillustrationRepoImpl extends ProductmodelillustrationRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productmodelid", "illustrationid", "modifieddate"::text""".insertReturning(using ProductmodelillustrationRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelillustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productmodelillustration_TEMP (like production.productmodelillustration) on commit drop".execute + val copied = streamingInsert(s"""copy productmodelillustration_TEMP("productmodelid", "illustrationid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelillustrationRow.text) + val merged = sql"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + select * from productmodelillustration_TEMP + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelillustration_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala index fa0976c0a..d092852f2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala @@ -104,4 +104,13 @@ class ProductmodelillustrationRepoMock(toRow: Function1[Productmodelillustration UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelillustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala index 9e7af612d..0faf5dfcb 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala @@ -32,4 +32,7 @@ trait ProductmodelproductdescriptioncultureRepo { def update: UpdateBuilder[ProductmodelproductdescriptioncultureFields, ProductmodelproductdescriptioncultureRow] def update(row: ProductmodelproductdescriptioncultureRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductmodelproductdescriptioncultureRow): ZIO[ZConnection, Throwable, UpdateResult[ProductmodelproductdescriptioncultureRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala index 46d1c7b41..90d8ecdb1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala @@ -126,4 +126,17 @@ class ProductmodelproductdescriptioncultureRepoImpl extends Productmodelproductd "modifieddate" = EXCLUDED."modifieddate" returning "productmodelid", "productdescriptionid", "cultureid", "modifieddate"::text""".insertReturning(using ProductmodelproductdescriptioncultureRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productmodelproductdescriptionculture_TEMP (like production.productmodelproductdescriptionculture) on commit drop".execute + val copied = streamingInsert(s"""copy productmodelproductdescriptionculture_TEMP("productmodelid", "productdescriptionid", "cultureid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelproductdescriptioncultureRow.text) + val merged = sql"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + select * from productmodelproductdescriptionculture_TEMP + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelproductdescriptionculture_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala index 19545e0df..326d44cf1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala @@ -104,4 +104,13 @@ class ProductmodelproductdescriptioncultureRepoMock(toRow: Function1[Productmode UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala index 493c164ac..99411251f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala @@ -32,4 +32,7 @@ trait ProductphotoRepo { def update: UpdateBuilder[ProductphotoFields, ProductphotoRow] def update(row: ProductphotoRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductphotoRow): ZIO[ZConnection, Throwable, UpdateResult[ProductphotoRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala index aaababbdb..1e275225c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala @@ -124,4 +124,21 @@ class ProductphotoRepoImpl extends ProductphotoRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate"::text""".insertReturning(using ProductphotoRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productphoto_TEMP (like production.productphoto) on commit drop".execute + val copied = streamingInsert(s"""copy productphoto_TEMP("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") from stdin""", batchSize, unsaved)(ProductphotoRow.text) + val merged = sql"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + select * from productphoto_TEMP + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productphoto_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala index cbcbffd66..d7bd18730 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala @@ -104,4 +104,13 @@ class ProductphotoRepoMock(toRow: Function1[ProductphotoRowUnsaved, Productphoto UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productphotoid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala index eb8617e15..80f5e8a48 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala @@ -32,4 +32,7 @@ trait ProductproductphotoRepo { def update: UpdateBuilder[ProductproductphotoFields, ProductproductphotoRow] def update(row: ProductproductphotoRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductproductphotoRow): ZIO[ZConnection, Throwable, UpdateResult[ProductproductphotoRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductproductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala index 9094ad004..42a3dc62d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala @@ -129,4 +129,18 @@ class ProductproductphotoRepoImpl extends ProductproductphotoRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "productphotoid", "primary", "modifieddate"::text""".insertReturning(using ProductproductphotoRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductproductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productproductphoto_TEMP (like production.productproductphoto) on commit drop".execute + val copied = streamingInsert(s"""copy productproductphoto_TEMP("productid", "productphotoid", "primary", "modifieddate") from stdin""", batchSize, unsaved)(ProductproductphotoRow.text) + val merged = sql"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + select * from productproductphoto_TEMP + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productproductphoto_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala index 984227a4c..9afe2290d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala @@ -104,4 +104,13 @@ class ProductproductphotoRepoMock(toRow: Function1[ProductproductphotoRowUnsaved UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductproductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala index d6f45cb20..5d9570ad5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala @@ -32,4 +32,7 @@ trait ProductreviewRepo { def update: UpdateBuilder[ProductreviewFields, ProductreviewRow] def update(row: ProductreviewRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductreviewRow): ZIO[ZConnection, Throwable, UpdateResult[ProductreviewRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductreviewRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala index d2ae45bed..b49714ffa 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala @@ -136,4 +136,23 @@ class ProductreviewRepoImpl extends ProductreviewRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productreviewid", "productid", "reviewername", "reviewdate"::text, "emailaddress", "rating", "comments", "modifieddate"::text""".insertReturning(using ProductreviewRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductreviewRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productreview_TEMP (like production.productreview) on commit drop".execute + val copied = streamingInsert(s"""copy productreview_TEMP("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") from stdin""", batchSize, unsaved)(ProductreviewRow.text) + val merged = sql"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + select * from productreview_TEMP + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productreview_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala index 791c10e64..d39abe0e9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala @@ -104,4 +104,13 @@ class ProductreviewRepoMock(toRow: Function1[ProductreviewRowUnsaved, Productrev UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductreviewRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productreviewid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala index b0d3f3952..2c8f26b32 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala @@ -32,4 +32,7 @@ trait ProductsubcategoryRepo { def update: UpdateBuilder[ProductsubcategoryFields, ProductsubcategoryRow] def update(row: ProductsubcategoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductsubcategoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductsubcategoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductsubcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala index 0cf7c5975..20da56b53 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala @@ -124,4 +124,20 @@ class ProductsubcategoryRepoImpl extends ProductsubcategoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate"::text""".insertReturning(using ProductsubcategoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductsubcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productsubcategory_TEMP (like production.productsubcategory) on commit drop".execute + val copied = streamingInsert(s"""copy productsubcategory_TEMP("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductsubcategoryRow.text) + val merged = sql"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + select * from productsubcategory_TEMP + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productsubcategory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala index e34b3f74b..0fcf2060c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductsubcategoryRepoMock(toRow: Function1[ProductsubcategoryRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductsubcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productsubcategoryid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala index 591e1f6f0..f9762d2b0 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala @@ -32,4 +32,7 @@ trait ScrapreasonRepo { def update: UpdateBuilder[ScrapreasonFields, ScrapreasonRow] def update(row: ScrapreasonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ScrapreasonRow): ZIO[ZConnection, Throwable, UpdateResult[ScrapreasonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ScrapreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala index 13c70d4bb..fd8f70a67 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala @@ -111,4 +111,18 @@ class ScrapreasonRepoImpl extends ScrapreasonRepo { "modifieddate" = EXCLUDED."modifieddate" returning "scrapreasonid", "name", "modifieddate"::text""".insertReturning(using ScrapreasonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ScrapreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table scrapreason_TEMP (like production.scrapreason) on commit drop".execute + val copied = streamingInsert(s"""copy scrapreason_TEMP("scrapreasonid", "name", "modifieddate") from stdin""", batchSize, unsaved)(ScrapreasonRow.text) + val merged = sql"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + select * from scrapreason_TEMP + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table scrapreason_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala index bd35f68e1..b6aaf6a0e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala @@ -104,4 +104,13 @@ class ScrapreasonRepoMock(toRow: Function1[ScrapreasonRowUnsaved, ScrapreasonRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ScrapreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.scrapreasonid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala index 5fd64695f..d2dddd5c9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala @@ -32,4 +32,7 @@ trait TransactionhistoryRepo { def update: UpdateBuilder[TransactionhistoryFields, TransactionhistoryRow] def update(row: TransactionhistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: TransactionhistoryRow): ZIO[ZConnection, Throwable, UpdateResult[TransactionhistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala index fc3444f3d..cdb82a497 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala @@ -142,4 +142,24 @@ class TransactionhistoryRepoImpl extends TransactionhistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text""".insertReturning(using TransactionhistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table transactionhistory_TEMP (like production.transactionhistory) on commit drop".execute + val copied = streamingInsert(s"""copy transactionhistory_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(TransactionhistoryRow.text) + val merged = sql"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistory_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala index d2093da59..a1c76efc2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala @@ -104,4 +104,13 @@ class TransactionhistoryRepoMock(toRow: Function1[TransactionhistoryRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.transactionid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala index f9a6d2ebc..d87034500 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala @@ -32,4 +32,7 @@ trait TransactionhistoryarchiveRepo { def update: UpdateBuilder[TransactionhistoryarchiveFields, TransactionhistoryarchiveRow] def update(row: TransactionhistoryarchiveRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: TransactionhistoryarchiveRow): ZIO[ZConnection, Throwable, UpdateResult[TransactionhistoryarchiveRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryarchiveRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala index cef5180d3..b84c63161 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala @@ -138,4 +138,24 @@ class TransactionhistoryarchiveRepoImpl extends TransactionhistoryarchiveRepo { "modifieddate" = EXCLUDED."modifieddate" returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text""".insertReturning(using TransactionhistoryarchiveRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryarchiveRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table transactionhistoryarchive_TEMP (like production.transactionhistoryarchive) on commit drop".execute + val copied = streamingInsert(s"""copy transactionhistoryarchive_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(TransactionhistoryarchiveRow.text) + val merged = sql"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistoryarchive_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistoryarchive_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala index d7dbcae65..fd34236e6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala @@ -104,4 +104,13 @@ class TransactionhistoryarchiveRepoMock(toRow: Function1[Transactionhistoryarchi UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryarchiveRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.transactionid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala index 7de3b8745..447430e09 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala @@ -32,4 +32,7 @@ trait UnitmeasureRepo { def update: UpdateBuilder[UnitmeasureFields, UnitmeasureRow] def update(row: UnitmeasureRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: UnitmeasureRow): ZIO[ZConnection, Throwable, UpdateResult[UnitmeasureRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UnitmeasureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala index 885cb06da..5698a7ff7 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala @@ -108,4 +108,18 @@ class UnitmeasureRepoImpl extends UnitmeasureRepo { "modifieddate" = EXCLUDED."modifieddate" returning "unitmeasurecode", "name", "modifieddate"::text""".insertReturning(using UnitmeasureRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UnitmeasureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table unitmeasure_TEMP (like production.unitmeasure) on commit drop".execute + val copied = streamingInsert(s"""copy unitmeasure_TEMP("unitmeasurecode", "name", "modifieddate") from stdin""", batchSize, unsaved)(UnitmeasureRow.text) + val merged = sql"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + select * from unitmeasure_TEMP + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table unitmeasure_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala index ff47e0dcc..1cafad1f6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala @@ -104,4 +104,13 @@ class UnitmeasureRepoMock(toRow: Function1[UnitmeasureRowUnsaved, UnitmeasureRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UnitmeasureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.unitmeasurecode -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala index f58124a29..76e4ac767 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala @@ -32,4 +32,7 @@ trait WorkorderRepo { def update: UpdateBuilder[WorkorderFields, WorkorderRow] def update(row: WorkorderRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: WorkorderRow): ZIO[ZConnection, Throwable, UpdateResult[WorkorderRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala index ae17d3504..b3ec67bd1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala @@ -138,4 +138,24 @@ class WorkorderRepoImpl extends WorkorderRepo { "modifieddate" = EXCLUDED."modifieddate" returning "workorderid", "productid", "orderqty", "scrappedqty", "startdate"::text, "enddate"::text, "duedate"::text, "scrapreasonid", "modifieddate"::text""".insertReturning(using WorkorderRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table workorder_TEMP (like production.workorder) on commit drop".execute + val copied = streamingInsert(s"""copy workorder_TEMP("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") from stdin""", batchSize, unsaved)(WorkorderRow.text) + val merged = sql"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + select * from workorder_TEMP + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorder_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala index 5f6fb7a10..1d0ef61f0 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala @@ -104,4 +104,13 @@ class WorkorderRepoMock(toRow: Function1[WorkorderRowUnsaved, WorkorderRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.workorderid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala index 69e484545..738cf38d9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala @@ -32,4 +32,7 @@ trait WorkorderroutingRepo { def update: UpdateBuilder[WorkorderroutingFields, WorkorderroutingRow] def update(row: WorkorderroutingRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: WorkorderroutingRow): ZIO[ZConnection, Throwable, UpdateResult[WorkorderroutingRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderroutingRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala index a40f78010..664c45955 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala @@ -159,4 +159,25 @@ class WorkorderroutingRepoImpl extends WorkorderroutingRepo { "modifieddate" = EXCLUDED."modifieddate" returning "workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate"::text, "scheduledenddate"::text, "actualstartdate"::text, "actualenddate"::text, "actualresourcehrs", "plannedcost", "actualcost", "modifieddate"::text""".insertReturning(using WorkorderroutingRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderroutingRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table workorderrouting_TEMP (like production.workorderrouting) on commit drop".execute + val copied = streamingInsert(s"""copy workorderrouting_TEMP("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(WorkorderroutingRow.text) + val merged = sql"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") + select * from workorderrouting_TEMP + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorderrouting_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala index a3d43858c..5c7f3cc12 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala @@ -104,4 +104,13 @@ class WorkorderroutingRepoMock(toRow: Function1[WorkorderroutingRowUnsaved, Work UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderroutingRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala index f1237e876..716341029 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala @@ -29,4 +29,7 @@ trait FlaffRepo { def update: UpdateBuilder[FlaffFields, FlaffRow] def update(row: FlaffRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: FlaffRow): ZIO[ZConnection, Throwable, UpdateResult[FlaffRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FlaffRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala index 9d7cf7a6a..9a3e5ad45 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala @@ -97,4 +97,17 @@ class FlaffRepoImpl extends FlaffRepo { "parentspecifier" = EXCLUDED."parentspecifier" returning "code", "another_code", "some_number", "specifier", "parentspecifier"""".insertReturning(using FlaffRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FlaffRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table flaff_TEMP (like public.flaff) on commit drop".execute + val copied = streamingInsert(s"""copy flaff_TEMP("code", "another_code", "some_number", "specifier", "parentspecifier") from stdin""", batchSize, unsaved)(FlaffRow.text) + val merged = sql"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + select * from flaff_TEMP + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + ; + drop table flaff_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala index c498852dd..4850617e1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala @@ -90,4 +90,13 @@ class FlaffRepoMock(map: scala.collection.mutable.Map[FlaffId, FlaffRow] = scala UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FlaffRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala index 94abafbe0..e5850f885 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala @@ -32,4 +32,7 @@ trait IdentityTestRepo { def update: UpdateBuilder[IdentityTestFields, IdentityTestRow] def update(row: IdentityTestRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: IdentityTestRow): ZIO[ZConnection, Throwable, UpdateResult[IdentityTestRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IdentityTestRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala index 1d70a2026..c67824ee6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala @@ -106,4 +106,18 @@ class IdentityTestRepoImpl extends IdentityTestRepo { "default_generated" = EXCLUDED."default_generated" returning "always_generated", "default_generated", "name"""".insertReturning(using IdentityTestRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IdentityTestRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table identity-test_TEMP (like public.identity-test) on commit drop".execute + val copied = streamingInsert(s"""copy identity-test_TEMP("always_generated", "default_generated", "name") from stdin""", batchSize, unsaved)(IdentityTestRow.text) + val merged = sql"""insert into public.identity-test("always_generated", "default_generated", "name") + select * from identity-test_TEMP + on conflict ("name") + do update set + "always_generated" = EXCLUDED."always_generated", + "default_generated" = EXCLUDED."default_generated" + ; + drop table identity-test_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala index 885b7ceaf..892fffec6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala @@ -104,4 +104,13 @@ class IdentityTestRepoMock(toRow: Function1[IdentityTestRowUnsaved, IdentityTest UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IdentityTestRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.name -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala index ff5fc2d61..79bb9246b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala @@ -34,4 +34,7 @@ trait UsersRepo { def update: UpdateBuilder[UsersFields, UsersRow] def update(row: UsersRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: UsersRow): ZIO[ZConnection, Throwable, UpdateResult[UsersRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UsersRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala index ca99aff9a..e3709fd55 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala @@ -131,4 +131,22 @@ class UsersRepoImpl extends UsersRepo { "verified_on" = EXCLUDED."verified_on" returning "user_id", "name", "last_name", "email"::text, "password", "created_at"::text, "verified_on"::text""".insertReturning(using UsersRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UsersRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table users_TEMP (like public.users) on commit drop".execute + val copied = streamingInsert(s"""copy users_TEMP("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") from stdin""", batchSize, unsaved)(UsersRow.text) + val merged = sql"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + select * from users_TEMP + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + ; + drop table users_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala index 578a0d276..fe9fddaac 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala @@ -108,4 +108,13 @@ class UsersRepoMock(toRow: Function1[UsersRowUnsaved, UsersRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UsersRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.userId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala index 4b42e7a0f..d3511502e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala @@ -32,4 +32,7 @@ trait ProductvendorRepo { def update: UpdateBuilder[ProductvendorFields, ProductvendorRow] def update(row: ProductvendorRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductvendorRow): ZIO[ZConnection, Throwable, UpdateResult[ProductvendorRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductvendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala index 196340722..ff46675a0 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala @@ -155,4 +155,25 @@ class ProductvendorRepoImpl extends ProductvendorRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate"::text, "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate"::text""".insertReturning(using ProductvendorRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductvendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productvendor_TEMP (like purchasing.productvendor) on commit drop".execute + val copied = streamingInsert(s"""copy productvendor_TEMP("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") from stdin""", batchSize, unsaved)(ProductvendorRow.text) + val merged = sql"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + select * from productvendor_TEMP + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productvendor_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala index f5aa4d850..ca967f944 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala @@ -104,4 +104,13 @@ class ProductvendorRepoMock(toRow: Function1[ProductvendorRowUnsaved, Productven UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductvendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala index 3e7e3ea94..8e4f2212b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala @@ -32,4 +32,7 @@ trait PurchaseorderheaderRepo { def update: UpdateBuilder[PurchaseorderheaderFields, PurchaseorderheaderRow] def update(row: PurchaseorderheaderRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PurchaseorderheaderRow): ZIO[ZConnection, Throwable, UpdateResult[PurchaseorderheaderRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PurchaseorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala index b2a9f8cf2..60b8ca79f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala @@ -168,4 +168,27 @@ class PurchaseorderheaderRepoImpl extends PurchaseorderheaderRepo { "modifieddate" = EXCLUDED."modifieddate" returning "purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate"::text, "shipdate"::text, "subtotal", "taxamt", "freight", "modifieddate"::text""".insertReturning(using PurchaseorderheaderRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PurchaseorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table purchaseorderheader_TEMP (like purchasing.purchaseorderheader) on commit drop".execute + val copied = streamingInsert(s"""copy purchaseorderheader_TEMP("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") from stdin""", batchSize, unsaved)(PurchaseorderheaderRow.text) + val merged = sql"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + select * from purchaseorderheader_TEMP + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table purchaseorderheader_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala index 6e718679a..bad95dac9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala @@ -104,4 +104,13 @@ class PurchaseorderheaderRepoMock(toRow: Function1[PurchaseorderheaderRowUnsaved UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PurchaseorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.purchaseorderid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala index 981ee3cb9..411bd4d23 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala @@ -32,4 +32,7 @@ trait ShipmethodRepo { def update: UpdateBuilder[ShipmethodFields, ShipmethodRow] def update(row: ShipmethodRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ShipmethodRow): ZIO[ZConnection, Throwable, UpdateResult[ShipmethodRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShipmethodRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala index 1720360cc..d62b09679 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala @@ -134,4 +134,21 @@ class ShipmethodRepoImpl extends ShipmethodRepo { "modifieddate" = EXCLUDED."modifieddate" returning "shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate"::text""".insertReturning(using ShipmethodRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShipmethodRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table shipmethod_TEMP (like purchasing.shipmethod) on commit drop".execute + val copied = streamingInsert(s"""copy shipmethod_TEMP("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ShipmethodRow.text) + val merged = sql"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + select * from shipmethod_TEMP + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shipmethod_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala index cfcfc0d4b..4e5493283 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala @@ -104,4 +104,13 @@ class ShipmethodRepoMock(toRow: Function1[ShipmethodRowUnsaved, ShipmethodRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShipmethodRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.shipmethodid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala index 8c492a5d3..86e851db0 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala @@ -33,4 +33,7 @@ trait VendorRepo { def update: UpdateBuilder[VendorFields, VendorRow] def update(row: VendorRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: VendorRow): ZIO[ZConnection, Throwable, UpdateResult[VendorRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, VendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala index 4950511a5..170d7d770 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala @@ -139,4 +139,23 @@ class VendorRepoImpl extends VendorRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate"::text""".insertReturning(using VendorRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, VendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table vendor_TEMP (like purchasing.vendor) on commit drop".execute + val copied = streamingInsert(s"""copy vendor_TEMP("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") from stdin""", batchSize, unsaved)(VendorRow.text) + val merged = sql"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + select * from vendor_TEMP + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table vendor_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala index 1d744413b..f34c631b9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala @@ -105,4 +105,13 @@ class VendorRepoMock(toRow: Function1[VendorRowUnsaved, VendorRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, VendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala index 1b00302e4..e37bcc973 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala @@ -32,4 +32,7 @@ trait CountryregioncurrencyRepo { def update: UpdateBuilder[CountryregioncurrencyFields, CountryregioncurrencyRow] def update(row: CountryregioncurrencyRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CountryregioncurrencyRow): ZIO[ZConnection, Throwable, UpdateResult[CountryregioncurrencyRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregioncurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala index 115741d37..e86e7c213 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala @@ -121,4 +121,17 @@ class CountryregioncurrencyRepoImpl extends CountryregioncurrencyRepo { "modifieddate" = EXCLUDED."modifieddate" returning "countryregioncode", "currencycode", "modifieddate"::text""".insertReturning(using CountryregioncurrencyRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregioncurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table countryregioncurrency_TEMP (like sales.countryregioncurrency) on commit drop".execute + val copied = streamingInsert(s"""copy countryregioncurrency_TEMP("countryregioncode", "currencycode", "modifieddate") from stdin""", batchSize, unsaved)(CountryregioncurrencyRow.text) + val merged = sql"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + select * from countryregioncurrency_TEMP + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregioncurrency_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala index 6cdc2736f..a7c3a291a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala @@ -104,4 +104,13 @@ class CountryregioncurrencyRepoMock(toRow: Function1[CountryregioncurrencyRowUns UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregioncurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala index c8ec9706e..17ea39071 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala @@ -34,4 +34,7 @@ trait CreditcardRepo { def update: UpdateBuilder[CreditcardFields, CreditcardRow] def update(row: CreditcardRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CreditcardRow): ZIO[ZConnection, Throwable, UpdateResult[CreditcardRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala index 8aec1fe2a..12bff0a19 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala @@ -126,4 +126,21 @@ class CreditcardRepoImpl extends CreditcardRepo { "modifieddate" = EXCLUDED."modifieddate" returning "creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate"::text""".insertReturning(using CreditcardRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table creditcard_TEMP (like sales.creditcard) on commit drop".execute + val copied = streamingInsert(s"""copy creditcard_TEMP("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") from stdin""", batchSize, unsaved)(CreditcardRow.text) + val merged = sql"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + select * from creditcard_TEMP + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table creditcard_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala index 291a41d1f..b646d6f38 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala @@ -106,4 +106,13 @@ class CreditcardRepoMock(toRow: Function1[CreditcardRowUnsaved, CreditcardRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.creditcardid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala index debac359b..bdcbfb7d2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala @@ -32,4 +32,7 @@ trait CurrencyRepo { def update: UpdateBuilder[CurrencyFields, CurrencyRow] def update(row: CurrencyRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CurrencyRow): ZIO[ZConnection, Throwable, UpdateResult[CurrencyRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala index 9249342c2..8c7538da1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala @@ -108,4 +108,18 @@ class CurrencyRepoImpl extends CurrencyRepo { "modifieddate" = EXCLUDED."modifieddate" returning "currencycode", "name", "modifieddate"::text""".insertReturning(using CurrencyRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table currency_TEMP (like sales.currency) on commit drop".execute + val copied = streamingInsert(s"""copy currency_TEMP("currencycode", "name", "modifieddate") from stdin""", batchSize, unsaved)(CurrencyRow.text) + val merged = sql"""insert into sales.currency("currencycode", "name", "modifieddate") + select * from currency_TEMP + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currency_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala index 1ad286085..9424f1700 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala @@ -104,4 +104,13 @@ class CurrencyRepoMock(toRow: Function1[CurrencyRowUnsaved, CurrencyRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.currencycode -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala index 124c5481b..c5b9b66b8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala @@ -32,4 +32,7 @@ trait CurrencyrateRepo { def update: UpdateBuilder[CurrencyrateFields, CurrencyrateRow] def update(row: CurrencyrateRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CurrencyrateRow): ZIO[ZConnection, Throwable, UpdateResult[CurrencyrateRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala index ffbc50a10..c1c91a89a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala @@ -128,4 +128,22 @@ class CurrencyrateRepoImpl extends CurrencyrateRepo { "modifieddate" = EXCLUDED."modifieddate" returning "currencyrateid", "currencyratedate"::text, "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate"::text""".insertReturning(using CurrencyrateRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table currencyrate_TEMP (like sales.currencyrate) on commit drop".execute + val copied = streamingInsert(s"""copy currencyrate_TEMP("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") from stdin""", batchSize, unsaved)(CurrencyrateRow.text) + val merged = sql"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + select * from currencyrate_TEMP + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currencyrate_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala index fbb47db58..3917a4ba2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala @@ -104,4 +104,13 @@ class CurrencyrateRepoMock(toRow: Function1[CurrencyrateRowUnsaved, Currencyrate UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.currencyrateid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala index 709ff71e3..11a7ffa9f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala @@ -32,4 +32,7 @@ trait CustomerRepo { def update: UpdateBuilder[CustomerFields, CustomerRow] def update(row: CustomerRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CustomerRow): ZIO[ZConnection, Throwable, UpdateResult[CustomerRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CustomerRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala index 3c5be1cab..8c6ac3c9f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala @@ -129,4 +129,21 @@ class CustomerRepoImpl extends CustomerRepo { "modifieddate" = EXCLUDED."modifieddate" returning "customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate"::text""".insertReturning(using CustomerRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CustomerRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table customer_TEMP (like sales.customer) on commit drop".execute + val copied = streamingInsert(s"""copy customer_TEMP("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(CustomerRow.text) + val merged = sql"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + select * from customer_TEMP + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table customer_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala index 9381b3495..076868052 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala @@ -104,4 +104,13 @@ class CustomerRepoMock(toRow: Function1[CustomerRowUnsaved, CustomerRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CustomerRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.customerid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala index 71c56ddf0..4176a31b8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala @@ -34,4 +34,7 @@ trait PersoncreditcardRepo { def update: UpdateBuilder[PersoncreditcardFields, PersoncreditcardRow] def update(row: PersoncreditcardRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersoncreditcardRow): ZIO[ZConnection, Throwable, UpdateResult[PersoncreditcardRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersoncreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala index 172b8a34f..cd81bfa6f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala @@ -122,4 +122,17 @@ class PersoncreditcardRepoImpl extends PersoncreditcardRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "creditcardid", "modifieddate"::text""".insertReturning(using PersoncreditcardRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersoncreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table personcreditcard_TEMP (like sales.personcreditcard) on commit drop".execute + val copied = streamingInsert(s"""copy personcreditcard_TEMP("businessentityid", "creditcardid", "modifieddate") from stdin""", batchSize, unsaved)(PersoncreditcardRow.text) + val merged = sql"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + select * from personcreditcard_TEMP + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personcreditcard_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala index 1f97ec8e5..11aa812aa 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala @@ -106,4 +106,13 @@ class PersoncreditcardRepoMock(toRow: Function1[PersoncreditcardRowUnsaved, Pers UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersoncreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala index fcf64e94e..5fdc24bdb 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala @@ -32,4 +32,7 @@ trait SalesorderdetailRepo { def update: UpdateBuilder[SalesorderdetailFields, SalesorderdetailRow] def update(row: SalesorderdetailRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesorderdetailRow): ZIO[ZConnection, Throwable, UpdateResult[SalesorderdetailRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderdetailRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala index 53ac8405c..136377185 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala @@ -162,4 +162,24 @@ class SalesorderdetailRepoImpl extends SalesorderdetailRepo { "modifieddate" = EXCLUDED."modifieddate" returning "salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate"::text""".insertReturning(using SalesorderdetailRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderdetailRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesorderdetail_TEMP (like sales.salesorderdetail) on commit drop".execute + val copied = streamingInsert(s"""copy salesorderdetail_TEMP("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderdetailRow.text) + val merged = sql"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + select * from salesorderdetail_TEMP + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderdetail_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala index f3c464945..9c0332c8d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala @@ -104,4 +104,13 @@ class SalesorderdetailRepoMock(toRow: Function1[SalesorderdetailRowUnsaved, Sale UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderdetailRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala index b4a0ba40f..0b4b8057c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala @@ -32,4 +32,7 @@ trait SalesorderheaderRepo { def update: UpdateBuilder[SalesorderheaderFields, SalesorderheaderRow] def update(row: SalesorderheaderRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesorderheaderRow): ZIO[ZConnection, Throwable, UpdateResult[SalesorderheaderRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala index 67f2d5b14..da802bde6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala @@ -235,4 +235,40 @@ class SalesorderheaderRepoImpl extends SalesorderheaderRepo { "modifieddate" = EXCLUDED."modifieddate" returning "salesorderid", "revisionnumber", "orderdate"::text, "duedate"::text, "shipdate"::text, "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate"::text""".insertReturning(using SalesorderheaderRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesorderheader_TEMP (like sales.salesorderheader) on commit drop".execute + val copied = streamingInsert(s"""copy salesorderheader_TEMP("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderheaderRow.text) + val merged = sql"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + select * from salesorderheader_TEMP + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + "shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = 
EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheader_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala index ac01ebbca..78c298420 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala @@ -104,4 +104,13 @@ class SalesorderheaderRepoMock(toRow: Function1[SalesorderheaderRowUnsaved, Sale UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.salesorderid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala index 0726ebe73..32536d2d5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala @@ -32,4 +32,7 @@ trait SalesorderheadersalesreasonRepo { def update: UpdateBuilder[SalesorderheadersalesreasonFields, SalesorderheadersalesreasonRow] def update(row: SalesorderheadersalesreasonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesorderheadersalesreasonRow): ZIO[ZConnection, Throwable, UpdateResult[SalesorderheadersalesreasonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala index 49e765c06..6ccef607a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala @@ -121,4 +121,17 @@ class SalesorderheadersalesreasonRepoImpl extends SalesorderheadersalesreasonRep "modifieddate" = EXCLUDED."modifieddate" returning "salesorderid", "salesreasonid", "modifieddate"::text""".insertReturning(using SalesorderheadersalesreasonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesorderheadersalesreason_TEMP (like sales.salesorderheadersalesreason) on commit drop".execute + val copied = streamingInsert(s"""copy salesorderheadersalesreason_TEMP("salesorderid", "salesreasonid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderheadersalesreasonRow.text) + val merged = sql"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + select * from salesorderheadersalesreason_TEMP + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheadersalesreason_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala index d3eed7395..80540c704 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala @@ -104,4 +104,13 @@ class SalesorderheadersalesreasonRepoMock(toRow: Function1[Salesorderheadersales UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala index c4aa0d115..8befdd62b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala @@ -33,4 +33,7 @@ trait SalespersonRepo { def update: UpdateBuilder[SalespersonFields, SalespersonRow] def update(row: SalespersonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalespersonRow): ZIO[ZConnection, Throwable, UpdateResult[SalespersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala index 8d50e8976..ada824356 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala @@ -150,4 +150,24 @@ class SalespersonRepoImpl extends SalespersonRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate"::text""".insertReturning(using SalespersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesperson_TEMP (like sales.salesperson) on commit drop".execute + val copied = streamingInsert(s"""copy salesperson_TEMP("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalespersonRow.text) + val merged = sql"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + select * from salesperson_TEMP + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesperson_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala index f7c3cf1da..2e5f55c4f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala @@ -105,4 +105,13 @@ class SalespersonRepoMock(toRow: Function1[SalespersonRowUnsaved, SalespersonRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala index 19ec5ee40..c4595a39f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala @@ -32,4 +32,7 @@ trait SalespersonquotahistoryRepo { def update: UpdateBuilder[SalespersonquotahistoryFields, SalespersonquotahistoryRow] def update(row: SalespersonquotahistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalespersonquotahistoryRow): ZIO[ZConnection, Throwable, UpdateResult[SalespersonquotahistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonquotahistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala index b6c9bc46f..b2da1492a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala @@ -133,4 +133,19 @@ class SalespersonquotahistoryRepoImpl extends SalespersonquotahistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "quotadate"::text, "salesquota", "rowguid", "modifieddate"::text""".insertReturning(using SalespersonquotahistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonquotahistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salespersonquotahistory_TEMP (like sales.salespersonquotahistory) on commit drop".execute + val copied = streamingInsert(s"""copy salespersonquotahistory_TEMP("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalespersonquotahistoryRow.text) + val merged = sql"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + select * from salespersonquotahistory_TEMP + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salespersonquotahistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala index 6db7161ae..b7e29ce5a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala @@ -104,4 +104,13 @@ class SalespersonquotahistoryRepoMock(toRow: Function1[SalespersonquotahistoryRo UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonquotahistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala index 670156811..400898fbd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala @@ -32,4 +32,7 @@ trait SalesreasonRepo { def update: UpdateBuilder[SalesreasonFields, SalesreasonRow] def update(row: SalesreasonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesreasonRow): ZIO[ZConnection, Throwable, UpdateResult[SalesreasonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala index 52076ec05..2a843f8dd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala @@ -115,4 +115,19 @@ class SalesreasonRepoImpl extends SalesreasonRepo { "modifieddate" = EXCLUDED."modifieddate" returning "salesreasonid", "name", "reasontype", "modifieddate"::text""".insertReturning(using SalesreasonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesreason_TEMP (like sales.salesreason) on commit drop".execute + val copied = streamingInsert(s"""copy salesreason_TEMP("salesreasonid", "name", "reasontype", "modifieddate") from stdin""", batchSize, unsaved)(SalesreasonRow.text) + val merged = sql"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + select * from salesreason_TEMP + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesreason_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala index f22e606f0..0017e7b02 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala @@ -104,4 +104,13 @@ class SalesreasonRepoMock(toRow: Function1[SalesreasonRowUnsaved, SalesreasonRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.salesreasonid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala index 89c8a059f..1f3c781ed 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala @@ -32,4 +32,7 @@ trait SalestaxrateRepo { def update: UpdateBuilder[SalestaxrateFields, SalestaxrateRow] def update(row: SalestaxrateRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalestaxrateRow): ZIO[ZConnection, Throwable, UpdateResult[SalestaxrateRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalestaxrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala index a4a4e9c7b..7ec348ba4 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala @@ -137,4 +137,22 @@ class SalestaxrateRepoImpl extends SalestaxrateRepo { "modifieddate" = EXCLUDED."modifieddate" returning "salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate"::text""".insertReturning(using SalestaxrateRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalestaxrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salestaxrate_TEMP (like sales.salestaxrate) on commit drop".execute + val copied = streamingInsert(s"""copy salestaxrate_TEMP("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalestaxrateRow.text) + val merged = sql"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + select * from salestaxrate_TEMP + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salestaxrate_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala index 2ff8b8c5a..fbc7dc857 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala @@ -104,4 +104,13 @@ class SalestaxrateRepoMock(toRow: Function1[SalestaxrateRowUnsaved, Salestaxrate UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalestaxrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.salestaxrateid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala index 10c0ccfe6..8386e8d03 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala @@ -32,4 +32,7 @@ trait SalesterritoryRepo { def update: UpdateBuilder[SalesterritoryFields, SalesterritoryRow] def update(row: SalesterritoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesterritoryRow): ZIO[ZConnection, Throwable, UpdateResult[SalesterritoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala index e7540f5d7..931da224c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala @@ -157,4 +157,25 @@ class SalesterritoryRepoImpl extends SalesterritoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate"::text""".insertReturning(using SalesterritoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesterritory_TEMP (like sales.salesterritory) on commit drop".execute + val copied = streamingInsert(s"""copy salesterritory_TEMP("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesterritoryRow.text) + val merged = sql"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + select * from salesterritory_TEMP + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala index 43cef8c34..52c492124 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala @@ -104,4 +104,13 @@ class SalesterritoryRepoMock(toRow: Function1[SalesterritoryRowUnsaved, Salester UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.territoryid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala index 11e7a642e..8026964c2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala @@ -32,4 +32,7 @@ trait SalesterritoryhistoryRepo { def update: UpdateBuilder[SalesterritoryhistoryFields, SalesterritoryhistoryRow] def update(row: SalesterritoryhistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesterritoryhistoryRow): ZIO[ZConnection, Throwable, UpdateResult[SalesterritoryhistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala index 4fc126ce8..6ca7bd632 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala @@ -138,4 +138,19 @@ class SalesterritoryhistoryRepoImpl extends SalesterritoryhistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "territoryid", "startdate"::text, "enddate"::text, "rowguid", "modifieddate"::text""".insertReturning(using SalesterritoryhistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesterritoryhistory_TEMP (like sales.salesterritoryhistory) on commit drop".execute + val copied = streamingInsert(s"""copy salesterritoryhistory_TEMP("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesterritoryhistoryRow.text) + val merged = sql"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + select * from salesterritoryhistory_TEMP + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritoryhistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala index d8b9113f0..1f5d7520d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala @@ -104,4 +104,13 @@ class SalesterritoryhistoryRepoMock(toRow: Function1[SalesterritoryhistoryRowUns UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala index fa18348b7..aae757ab3 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala @@ -32,4 +32,7 @@ trait ShoppingcartitemRepo { def update: UpdateBuilder[ShoppingcartitemFields, ShoppingcartitemRow] def update(row: ShoppingcartitemRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ShoppingcartitemRow): ZIO[ZConnection, Throwable, UpdateResult[ShoppingcartitemRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShoppingcartitemRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala index 13d8b9ddf..95b39e32b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala @@ -130,4 +130,21 @@ class ShoppingcartitemRepoImpl extends ShoppingcartitemRepo { "modifieddate" = EXCLUDED."modifieddate" returning "shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated"::text, "modifieddate"::text""".insertReturning(using ShoppingcartitemRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShoppingcartitemRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table shoppingcartitem_TEMP (like sales.shoppingcartitem) on commit drop".execute + val copied = streamingInsert(s"""copy shoppingcartitem_TEMP("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") from stdin""", batchSize, unsaved)(ShoppingcartitemRow.text) + val merged = sql"""insert into sales.shoppingcartitem("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + select * from shoppingcartitem_TEMP + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shoppingcartitem_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala index a33d37745..f23e815f7 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala @@ -104,4 +104,13 @@ class ShoppingcartitemRepoMock(toRow: Function1[ShoppingcartitemRowUnsaved, Shop UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShoppingcartitemRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.shoppingcartitemid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala index 00d3b8c63..214ab73d9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala @@ -32,4 +32,7 @@ trait SpecialofferRepo { def update: UpdateBuilder[SpecialofferFields, SpecialofferRow] def update(row: SpecialofferRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SpecialofferRow): ZIO[ZConnection, Throwable, UpdateResult[SpecialofferRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala index 55399e98b..8e855342a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala @@ -153,4 +153,26 @@ class SpecialofferRepoImpl extends SpecialofferRepo { "modifieddate" = EXCLUDED."modifieddate" returning "specialofferid", "description", "discountpct", "type", "category", "startdate"::text, "enddate"::text, "minqty", "maxqty", "rowguid", "modifieddate"::text""".insertReturning(using SpecialofferRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table specialoffer_TEMP (like sales.specialoffer) on commit drop".execute + val copied = streamingInsert(s"""copy specialoffer_TEMP("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SpecialofferRow.text) + val merged = sql"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + select * from specialoffer_TEMP + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialoffer_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala index 3977a2f2e..72ee503d8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala @@ -104,4 +104,13 @@ class SpecialofferRepoMock(toRow: Function1[SpecialofferRowUnsaved, Specialoffer UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.specialofferid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala index 816c9302d..e871aa226 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala @@ -32,4 +32,7 @@ trait SpecialofferproductRepo { def update: UpdateBuilder[SpecialofferproductFields, SpecialofferproductRow] def update(row: SpecialofferproductRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SpecialofferproductRow): ZIO[ZConnection, Throwable, UpdateResult[SpecialofferproductRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferproductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala index e6ea351d9..07b54ae9d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala @@ -129,4 +129,18 @@ class SpecialofferproductRepoImpl extends SpecialofferproductRepo { "modifieddate" = EXCLUDED."modifieddate" returning "specialofferid", "productid", "rowguid", "modifieddate"::text""".insertReturning(using SpecialofferproductRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferproductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table specialofferproduct_TEMP (like sales.specialofferproduct) on commit drop".execute + val copied = streamingInsert(s"""copy specialofferproduct_TEMP("specialofferid", "productid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SpecialofferproductRow.text) + val merged = sql"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", "modifieddate") + select * from specialofferproduct_TEMP + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialofferproduct_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala index 8f1c036d5..e0398a13f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala @@ -104,4 +104,13 @@ class SpecialofferproductRepoMock(toRow: Function1[SpecialofferproductRowUnsaved UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferproductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala index 158550544..36ec748ee 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala @@ -33,4 +33,7 @@ trait StoreRepo { def update: UpdateBuilder[StoreFields, StoreRow] def update(row: StoreRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: StoreRow): ZIO[ZConnection, Throwable, UpdateResult[StoreRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StoreRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala index f6929c134..ee54b9440 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala @@ -127,4 +127,21 @@ class StoreRepoImpl extends StoreRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate"::text""".insertReturning(using StoreRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StoreRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table store_TEMP (like sales.store) on commit drop".execute + val copied = streamingInsert(s"""copy store_TEMP("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(StoreRow.text) + val merged = sql"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + select * from store_TEMP + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table store_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala index edd8c79fe..a150d4e54 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala @@ -105,4 +105,13 @@ class StoreRepoMock(toRow: Function1[StoreRowUnsaved, StoreRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StoreRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/src/scala/adventureworks/production/product/RepoTest.scala b/typo-tester-zio-jdbc/src/scala/adventureworks/production/product/RepoTest.scala new file mode 100644 index 000000000..4f6e0d290 --- /dev/null +++ b/typo-tester-zio-jdbc/src/scala/adventureworks/production/product/RepoTest.scala @@ -0,0 +1,33 @@ +package adventureworks.production.product + +import adventureworks.customtypes.* +import adventureworks.production.unitmeasure.* +import adventureworks.public.Name +import adventureworks.{SnapshotTest, withConnection} +import org.scalatest.Assertion +import zio.Chunk +import zio.stream.ZStream + +class RepoTest extends SnapshotTest { + def runTest(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + for { + _ <- unitmeasureRepo.upsertStreaming(ZStream(um1, um2)) + _ <- unitmeasureRepo.selectAll.runCollect.map(all => assert(List(um1, um2) == all.sortBy(_.name))) + um1a = um1.copy(name = Name("name1a")) + um2a = um2.copy(name = Name("name2a")) + _ <- unitmeasureRepo.upsertStreaming(ZStream(um1a, um2a)) + all <- unitmeasureRepo.selectAll.runCollect + } yield assert(Chunk(um1a, um2a) == all.sortBy(_.name)) + } + + test("in-memory") { + runTest(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now))) + } + + test("pg") { + runTest(new UnitmeasureRepoImpl) + } +} diff --git a/typo/src/scala/typo/TypesScala.scala b/typo/src/scala/typo/TypesScala.scala index 
8d2571cbc..3f1908598 100644 --- a/typo/src/scala/typo/TypesScala.scala +++ b/typo/src/scala/typo/TypesScala.scala @@ -14,6 +14,7 @@ object TypesScala { val Float = sc.Type.Qualified("scala.Float") val Function1 = sc.Type.Qualified("scala.Function1") val Int = sc.Type.Qualified("scala.Int") + val Iterable = sc.Type.Qualified("scala.collection.Iterable") val Iterator = sc.Type.Qualified("scala.collection.Iterator") val Left = sc.Type.Qualified("scala.Left") val List = sc.Type.Qualified("scala.List") diff --git a/typo/src/scala/typo/internal/ComputedTable.scala b/typo/src/scala/typo/internal/ComputedTable.scala index b30f2f338..e356af375 100644 --- a/typo/src/scala/typo/internal/ComputedTable.scala +++ b/typo/src/scala/typo/internal/ComputedTable.scala @@ -176,6 +176,12 @@ case class ComputedTable( RepoMethod.Insert(dbTable.name, cols, unsavedParam, names.RowName) }, if (options.enableStreamingInserts) Some(RepoMethod.InsertStreaming(dbTable.name, cols, names.RowName)) else None, + maybeId.collect { + case id if options.enableStreamingInserts => RepoMethod.UpsertStreaming(dbTable.name, cols, id, names.RowName) + }, + maybeId.collect { case id => + RepoMethod.UpsertBatch(dbTable.name, cols, id, names.RowName) + }, maybeUnsavedRow.map { unsavedRow => val unsavedParam = sc.Param(sc.Ident("unsaved"), unsavedRow.tpe, None) RepoMethod.InsertUnsaved(dbTable.name, cols, unsavedRow, unsavedParam, default, names.RowName) diff --git a/typo/src/scala/typo/internal/RepoMethod.scala b/typo/src/scala/typo/internal/RepoMethod.scala index 39288119f..1e955d273 100644 --- a/typo/src/scala/typo/internal/RepoMethod.scala +++ b/typo/src/scala/typo/internal/RepoMethod.scala @@ -86,6 +86,22 @@ object RepoMethod { rowType: sc.Type ) extends Mutator("upsert") + case class UpsertBatch( + relName: db.RelationName, + cols: NonEmptyList[ComputedColumn], + id: IdComputed, + rowType: sc.Type + ) extends Mutator("upsertBatch") + + case class UpsertStreaming( + relName: db.RelationName, + cols: NonEmptyList[ComputedColumn], + id: IdComputed, + rowType: sc.Type + ) extends Mutator("upsertStreaming") { + override val comment: Option[String] = Some("/* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */") + } + case class Insert( relName: db.RelationName, cols: NonEmptyList[ComputedColumn], diff --git a/typo/src/scala/typo/internal/codegen/DbLib.scala b/typo/src/scala/typo/internal/codegen/DbLib.scala index fc636e043..4433915dd 100644 --- a/typo/src/scala/typo/internal/codegen/DbLib.scala +++ b/typo/src/scala/typo/internal/codegen/DbLib.scala @@ -4,7 +4,7 @@ package codegen trait DbLib { def defaultedInstance: List[sc.Given] - def repoSig(repoMethod: RepoMethod): sc.Code + def repoSig(repoMethod: RepoMethod): Either[DbLib.NotImplementedFor, sc.Code] def repoImpl(repoMethod: RepoMethod): sc.Code def mockRepoImpl(id: IdComputed, repoMethod: RepoMethod, maybeToRow: Option[sc.Param]): sc.Code def testInsertMethod(x: ComputedTestInserts.InsertMethod): sc.Value @@ -17,6 +17,8 @@ trait DbLib { } object DbLib { + case class NotImplementedFor(library: String) + sealed trait RowType object RowType { case object Readable extends RowType diff --git a/typo/src/scala/typo/internal/codegen/DbLibAnorm.scala b/typo/src/scala/typo/internal/codegen/DbLibAnorm.scala index 03196afb1..405e46f5a 100644 --- a/typo/src/scala/typo/internal/codegen/DbLibAnorm.scala +++ b/typo/src/scala/typo/internal/codegen/DbLibAnorm.scala @@ -6,6 +6,7 @@ import typo.internal.analysis.MaybeReturnsRows class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefault, enableStreamingInserts: Boolean) extends DbLib { + val BatchSql = sc.Type.Qualified("anorm.BatchSql") val Column = sc.Type.Qualified("anorm.Column") val ToStatement = sc.Type.Qualified("anorm.ToStatement") val ToSql = sc.Type.Qualified("anorm.ToSql") @@ -20,6 +21,7 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa val TypeDoesNotMatch = sc.Type.Qualified("anorm.TypeDoesNotMatch") val SimpleSql = sc.Type.Qualified("anorm.SimpleSql") val Row = sc.Type.Qualified("anorm.Row") + val managed = sc.Type.Qualified("resource.managed") def rowParserFor(rowType: sc.Type) = code"$rowType.$rowParserName(1)" @@ -35,16 +37,48 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa val arrayParameterMetaDataName = sc.Ident("arrayParameterMetaData") val textSupport: Option[DbLibTextSupport] = if (enableStreamingInserts) Some(new DbLibTextSupport(pkg, inlineImplicits, None, default)) else None + val ExecuteReturningSyntax = sc.QIdent(List[List[sc.Ident]](List(sc.Ident("anorm")), pkg.idents, List(sc.Ident("ExecuteReturningSyntax"))).flatten) - override val additionalFiles: List[typo.sc.File] = - textSupport match { - case Some(textSupport) => - List( - sc.File(textSupport.Text, DbLibTextImplementations.Text, Nil, scope = Scope.Main), - sc.File(textSupport.streamingInsert, DbLibTextImplementations.streamingInsertAnorm(textSupport.Text), Nil, scope = Scope.Main) + override val additionalFiles: List[typo.sc.File] = { + List[List[sc.File]]( + List( + sc.File( + tpe = sc.Type.Qualified(ExecuteReturningSyntax), + contents = { + // drop structured imports from anorm.*, as the auto-import thing would need to be more clever to handle this + code"""|object ${ExecuteReturningSyntax.name} { + | /* add executeReturning to anorm. 
it needs to be inside the package, because everything is hidden */ + | implicit class Ops(batchSql: BatchSql) { + | def executeReturning[T](parser: ResultSetParser[T])(implicit c: ${TypesJava.Connection}): T = + | $managed(batchSql.getFilledStatement(c, getGeneratedKeys = true))(using StatementResource, statementClassTag).acquireAndGet { ps => + | ps.executeBatch() + | Sql + | .asTry( + | parser, + | $managed(ps.getGeneratedKeys)(using ResultSetResource, resultSetClassTag), + | onFirstRow = false, + | ColumnAliaser.empty + | ) + | .get + | } + | } + |} + |""".stripMargin + }, + secondaryTypes = Nil, + scope = Scope.Main ) - case None => Nil - } + ), + textSupport match { + case Some(textSupport) => + List( + sc.File(textSupport.Text, DbLibTextImplementations.Text, Nil, scope = Scope.Main), + sc.File(textSupport.streamingInsert, DbLibTextImplementations.streamingInsertAnorm(textSupport.Text), Nil, scope = Scope.Main) + ) + case None => Nil + } + ).flatten + } def runtimeInterpolateValue(name: sc.Code, tpe: sc.Type, forbidInline: Boolean = false): sc.Code = if (inlineImplicits && !forbidInline) @@ -158,58 +192,62 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa case other => sc.Summon(ToStatement.of(other)).code } - override def repoSig(repoMethod: RepoMethod): sc.Code = { + override def repoSig(repoMethod: RepoMethod): Right[Nothing, sc.Code] = { val name = repoMethod.methodName repoMethod match { case RepoMethod.SelectBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}") case RepoMethod.SelectAll(_, _, rowType) => - code"def $name(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}" + Right(code"def $name(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}") case RepoMethod.SelectById(_, _, id, rowType) => - code"def $name(${id.param})(implicit c: ${TypesJava.Connection}): ${TypesScala.Option.of(rowType)}" + Right(code"def $name(${id.param})(implicit c: ${TypesJava.Connection}): ${TypesScala.Option.of(rowType)}") case RepoMethod.SelectByIds(_, _, idComputed, idsParam, rowType) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"toStatement$i"), ToStatement.of(sc.Type.ArrayOf(colType)), None) } val params = sc.Param(sc.Ident("c"), TypesJava.Connection, None) :: usedDefineds - code"def $name($idsParam)(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.List.of(rowType)}" + Right(code"def $name($idsParam)(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.List.of(rowType)}") case RepoMethod.SelectByIdsTracked(x) => val usedDefineds = x.idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"toStatement$i"), ToStatement.of(sc.Type.ArrayOf(colType)), None) } val params = sc.Param(sc.Ident("c"), TypesJava.Connection, None) :: usedDefineds - code"def $name(${x.idsParam})(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.Map.of(x.idComputed.tpe, x.rowType)}" + Right(code"def $name(${x.idsParam})(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.Map.of(x.idComputed.tpe, x.rowType)}") case RepoMethod.SelectByUnique(_, keyColumns, _, rowType) => - code"def $name(${keyColumns.map(_.param.code).mkCode(", ")})(implicit c: ${TypesJava.Connection}): ${TypesScala.Option.of(rowType)}" + Right(code"def $name(${keyColumns.map(_.param.code).mkCode(", ")})(implicit c: 
${TypesJava.Connection}): ${TypesScala.Option.of(rowType)}") case RepoMethod.SelectByFieldValues(_, _, _, fieldValueOrIdsParam, rowType) => - code"def $name($fieldValueOrIdsParam)(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}" + Right(code"def $name($fieldValueOrIdsParam)(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}") case RepoMethod.UpdateBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}") case RepoMethod.UpdateFieldValues(_, id, varargs, _, _, _) => - code"def $name(${id.param}, $varargs)(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}" + Right(code"def $name(${id.param}, $varargs)(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}") case RepoMethod.Update(_, _, _, param, _) => - code"def $name($param)(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}" + Right(code"def $name($param)(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}") case RepoMethod.Insert(_, _, unsavedParam, rowType) => - code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType" + Right(code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType") case RepoMethod.InsertStreaming(_, _, rowType) => - code"def $name(unsaved: ${TypesScala.Iterator.of(rowType)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Long}" + Right(code"def $name(unsaved: ${TypesScala.Iterator.of(rowType)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Long}") case RepoMethod.Upsert(_, _, _, unsavedParam, rowType) => - code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType" + Right(code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType") + case RepoMethod.UpsertBatch(_, _, _, rowType) => + Right(code"def $name(unsaved: ${TypesScala.Iterable.of(rowType)})(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}") + case RepoMethod.UpsertStreaming(_, _, _, rowType) => + Right(code"def $name(unsaved: ${TypesScala.Iterator.of(rowType)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Int}") case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, rowType) => - code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType" + Right(code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType") case RepoMethod.InsertUnsavedStreaming(_, unsaved) => - code"def $name(unsaved: ${TypesScala.Iterator.of(unsaved.tpe)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Long}" + Right(code"def $name(unsaved: ${TypesScala.Iterator.of(unsaved.tpe)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Long}") case RepoMethod.DeleteBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}") case RepoMethod.Delete(_, id) => - code"def $name(${id.param})(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}" + Right(code"def $name(${id.param})(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}") case RepoMethod.DeleteByIds(_, idComputed, idsParam) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"toStatement$i"), 
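Since repoSig now returns Either[DbLib.NotImplementedFor, sc.Code], a backend can decline a method instead of having to generate it; the zio-jdbc traits earlier in this diff surface such declines as a "// Not implementable for zio-jdbc: upsertBatch" comment. A hypothetical consumer of the new signature (illustrative only; the project's actual renderer may be wired differently) could look like:

// sketch: turn each repo method into either its signature or an explanatory comment
def renderRepoSigs(lib: DbLib, methods: List[RepoMethod]): List[sc.Code] =
  methods.map { method =>
    lib.repoSig(method) match {
      case Right(sig) => sig
      case Left(DbLib.NotImplementedFor(library)) =>
        code"// Not implementable for $library: ${method.methodName}"
    }
  }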
ToStatement.of(sc.Type.ArrayOf(colType)), None) } val params = sc.Param(sc.Ident("c"), TypesJava.Connection, None) :: usedDefineds - code"def $name($idsParam)(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.Int}" + Right(code"def $name($idsParam)(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.Int}") case RepoMethod.SqlFile(sqlScript) => val params = sc.Params(sqlScript.params.map(p => sc.Param(p.name, p.tpe, None))) val retType = sqlScript.maybeRowName match { case MaybeReturnsRows.Query(rowName) => TypesScala.List.of(rowName) case MaybeReturnsRows.Update => TypesScala.Int } - code"def $name$params(implicit c: ${TypesJava.Connection}): $retType" + Right(code"def $name$params(implicit c: ${TypesJava.Connection}): $retType") } } @@ -375,9 +413,12 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa runtimeInterpolateValue(code"${unsavedParam.name}.${c.name}", c.tpe).code ++ SqlCast.toPgCode(c) } - val pickExcludedCols = cols.toList - .filterNot(c => id.cols.exists(_.name == c.name)) - .map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" } + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } val sql = SQL { code"""|insert into $relName(${dbNames(cols, isRead = false)}) @@ -385,8 +426,7 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa | ${values.mkCode(",\n")} |) |on conflict (${dbNames(id.cols, isRead = false)}) - |do update set - | ${pickExcludedCols.mkCode(",\n")} + |$conflictAction |returning ${dbNames(cols, isRead = true)} |""".stripMargin } @@ -394,6 +434,60 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa code"""|$sql | .executeInsert(${rowParserFor(rowType)}.single) |""" + case RepoMethod.UpsertBatch(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } + + val sql = sc.s { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |values (${cols.map(c => code"{${c.dbName.value}}${SqlCast.toPgCode(c)}").mkCode(", ")}) + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |returning ${dbNames(cols, isRead = true)} + |""".stripMargin + } + + code"""|def toNamedParameter(row: $rowType): ${TypesScala.List.of(NamedParameter)} = ${TypesScala.List}( + | ${cols.map(c => code"$NamedParameter(${sc.StrLit(c.dbName.value)}, $ParameterValue(row.${c.name}, null, ${lookupToStatementFor(c.tpe)}))").mkCode(",\n")} + |) + |unsaved.toList match { + | case Nil => ${TypesScala.Nil} + | case head :: rest => + | new $ExecuteReturningSyntax.Ops( + | $BatchSql( + | $sql, + | toNamedParameter(head), + | rest.map(toNamedParameter)* + | ) + | ).executeReturning(${rowParserFor(rowType)}.*) + |}""".stripMargin + + case RepoMethod.UpsertStreaming(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } + val tempTablename = 
s"${relName.name}_TEMP" + + val copySql = sc.s(code"copy $tempTablename(${dbNames(cols, isRead = false)}) from stdin") + + val mergeSql = SQL { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |select * from $tempTablename + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |; + |drop table $tempTablename;""".stripMargin + } + code"""|${SQL(code"create temporary table $tempTablename (like $relName) on commit drop")}.execute(): @${TypesScala.nowarn} + |${textSupport.get.streamingInsert}($copySql, batchSize, unsaved)(${textSupport.get.lookupTextFor(rowType)}, c): @${TypesScala.nowarn} + |$mergeSql.executeUpdate()""".stripMargin case RepoMethod.InsertUnsaved(relName, cols, unsaved, unsavedParam, default, rowType) => val cases0 = unsaved.restCols.map { col => @@ -572,6 +666,16 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa case RepoMethod.Upsert(_, _, _, unsavedParam, _) => code"""|map.put(${unsavedParam.name}.${id.paramName}, ${unsavedParam.name}): @${TypesScala.nowarn} |${unsavedParam.name}""" + case RepoMethod.UpsertStreaming(_, _, id, _) => + code"""|unsaved.foreach { row => + | map += (row.${id.paramName} -> row) + |} + |unsaved.size""".stripMargin + case RepoMethod.UpsertBatch(_, _, id, _) => + code"""|unsaved.map { row => + | map += (row.${id.paramName} -> row) + | row + |}.toList""".stripMargin case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, _) => code"insert(${maybeToRow.get.name}(${unsavedParam.name}))" case RepoMethod.InsertStreaming(_, _, _) => diff --git a/typo/src/scala/typo/internal/codegen/DbLibDoobie.scala b/typo/src/scala/typo/internal/codegen/DbLibDoobie.scala index 7579232a7..2db8c6f80 100644 --- a/typo/src/scala/typo/internal/codegen/DbLibDoobie.scala +++ b/typo/src/scala/typo/internal/codegen/DbLibDoobie.scala @@ -27,6 +27,8 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef val fromWrite = sc.Type.Qualified("doobie.syntax.SqlInterpolator.SingleFragment.fromWrite") val FragmentOps = sc.Type.Qualified("doobie.postgres.syntax.FragmentOps") val JdbcType = sc.Type.Qualified("doobie.enumerated.JdbcType") + val Update = sc.Type.Qualified("doobie.util.update.Update") + val catsStdInstancesForList = sc.Type.Qualified("cats.instances.list.catsStdInstancesForList") val arrayGetName: sc.Ident = sc.Ident("arrayGet") val arrayPutName: sc.Ident = sc.Ident("arrayPut") @@ -61,64 +63,68 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef code"${composite.cols.map(cc => code"${cc.dbName.code} = ${runtimeInterpolateValue(code"${composite.paramName}.${cc.name}", cc.tpe)}").mkCode(" AND ")}" } - override def repoSig(repoMethod: RepoMethod): sc.Code = { + override def repoSig(repoMethod: RepoMethod): Right[Nothing, sc.Code] = { val name = repoMethod.methodName repoMethod match { case RepoMethod.SelectBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}") case RepoMethod.SelectAll(_, _, rowType) => - code"def $name: ${fs2Stream.of(ConnectionIO, rowType)}" + Right(code"def $name: ${fs2Stream.of(ConnectionIO, rowType)}") case RepoMethod.SelectById(_, _, id, rowType) => - code"def $name(${id.param}): ${ConnectionIO.of(TypesScala.Option.of(rowType))}" + Right(code"def $name(${id.param}): ${ConnectionIO.of(TypesScala.Option.of(rowType))}") case RepoMethod.SelectByIds(_, _, idComputed, idsParam, rowType) => val usedDefineds 
= idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"puts$i"), Put.of(sc.Type.ArrayOf(colType)), None) } usedDefineds match { case Nil => - code"def $name($idsParam): ${fs2Stream.of(ConnectionIO, rowType)}" + Right(code"def $name($idsParam): ${fs2Stream.of(ConnectionIO, rowType)}") case nonEmpty => - code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${fs2Stream.of(ConnectionIO, rowType)}" + Right(code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${fs2Stream.of(ConnectionIO, rowType)}") } case RepoMethod.SelectByIdsTracked(x) => val usedDefineds = x.idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"puts$i"), Put.of(sc.Type.ArrayOf(colType)), None) } val returnType = ConnectionIO.of(TypesScala.Map.of(x.idComputed.tpe, x.rowType)) usedDefineds match { case Nil => - code"def $name(${x.idsParam}): $returnType" + Right(code"def $name(${x.idsParam}): $returnType") case nonEmpty => - code"def $name(${x.idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): $returnType" + Right(code"def $name(${x.idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): $returnType") } case RepoMethod.SelectByUnique(_, keyColumns, _, rowType) => - code"def $name(${keyColumns.map(_.param.code).mkCode(", ")}): ${ConnectionIO.of(TypesScala.Option.of(rowType))}" + Right(code"def $name(${keyColumns.map(_.param.code).mkCode(", ")}): ${ConnectionIO.of(TypesScala.Option.of(rowType))}") case RepoMethod.SelectByFieldValues(_, _, _, fieldValueOrIdsParam, rowType) => - code"def $name($fieldValueOrIdsParam): ${fs2Stream.of(ConnectionIO, rowType)}" + Right(code"def $name($fieldValueOrIdsParam): ${fs2Stream.of(ConnectionIO, rowType)}") case RepoMethod.UpdateBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}") case RepoMethod.UpdateFieldValues(_, id, varargs, _, _, _) => - code"def $name(${id.param}, $varargs): ${ConnectionIO.of(TypesScala.Boolean)}" + Right(code"def $name(${id.param}, $varargs): ${ConnectionIO.of(TypesScala.Boolean)}") case RepoMethod.Update(_, _, _, param, _) => - code"def $name($param): ${ConnectionIO.of(TypesScala.Boolean)}" + Right(code"def $name($param): ${ConnectionIO.of(TypesScala.Boolean)}") case RepoMethod.Insert(_, _, unsavedParam, rowType) => - code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}" + Right(code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}") case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, rowType) => - code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}" + Right(code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}") case RepoMethod.InsertStreaming(_, _, rowType) => - code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, rowType)}, batchSize: ${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Long)}" + Right(code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, rowType)}, batchSize: ${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Long)}") + case RepoMethod.UpsertBatch(_, _, _, rowType) => + Right(code"def $name(unsaved: ${TypesScala.List.of(rowType)}): ${fs2Stream.of(ConnectionIO, rowType)}") case RepoMethod.InsertUnsavedStreaming(_, unsaved) => - code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, unsaved.tpe)}, batchSize: ${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Long)}" + Right(code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, unsaved.tpe)}, batchSize: 
${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Long)}") case RepoMethod.Upsert(_, _, _, unsavedParam, rowType) => - code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}" + Right(code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}") + case RepoMethod.UpsertStreaming(_, _, _, rowType) => + Right(code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, rowType)}, batchSize: ${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Int)}") case RepoMethod.DeleteBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}") case RepoMethod.Delete(_, id) => - code"def $name(${id.param}): ${ConnectionIO.of(TypesScala.Boolean)}" + Right(code"def $name(${id.param}): ${ConnectionIO.of(TypesScala.Boolean)}") case RepoMethod.DeleteByIds(_, idComputed, idsParam) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"put$i"), Put.of(sc.Type.ArrayOf(colType)), None) } usedDefineds match { case Nil => - code"def $name(${idsParam}): ${ConnectionIO.of(TypesScala.Int)}" + Right(code"def $name($idsParam): ${ConnectionIO.of(TypesScala.Int)}") case nonEmpty => - code"def $name(${idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ConnectionIO.of(TypesScala.Int)}" + Right(code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ConnectionIO.of(TypesScala.Int)}") } case RepoMethod.SqlFile(sqlScript) => @@ -129,7 +135,7 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef case MaybeReturnsRows.Update => ConnectionIO.of(TypesScala.Int) } - code"def $name$params: $retType" + Right(code"def $name$params: $retType") } } @@ -286,6 +292,7 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef if (fixVerySlowImplicit) code"new $FragmentOps($sql).copyIn(unsaved, batchSize)(using ${textSupport.get.lookupTextFor(rowType)})" else code"new $FragmentOps($sql).copyIn[$rowType](unsaved, batchSize)" + case RepoMethod.InsertUnsavedStreaming(relName, unsaved) => val sql = SQL(code"COPY $relName(${dbNames(unsaved.allCols, isRead = false)}) FROM STDIN (DEFAULT '${textSupport.get.DefaultValue}')") @@ -297,24 +304,81 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef code"${runtimeInterpolateValue(code"${unsavedParam.name}.${c.name}", c.tpe)}${SqlCast.toPgCode(c)}" } - val pickExcludedCols = cols.toList - .filterNot(c => id.cols.exists(_.name == c.name)) - .map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" } - + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } val sql = SQL { code"""|insert into $relName(${dbNames(cols, isRead = false)}) |values ( | ${values.mkCode(",\n")} |) |on conflict (${dbNames(id.cols, isRead = false)}) - |do update set - | ${pickExcludedCols.mkCode(",\n")} + |$conflictAction |returning ${dbNames(cols, isRead = true)} |""".stripMargin } code"${query(sql, rowType)}.unique" + case RepoMethod.UpsertBatch(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = 
EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } + + val sql = sc.s { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |values (${cols.map(c => code"?${SqlCast.toPgCode(c)}").mkCode(code",")}) + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |returning ${dbNames(cols, isRead = true)}""".stripMargin + } + + if (fixVerySlowImplicit) + code"""|${Update.of(rowType)}( + | $sql + |)(using $rowType.$writeName) + |.updateManyWithGeneratedKeys[$rowType](${dbNames(cols, isRead = false)})(unsaved)(using $catsStdInstancesForList, $rowType.$readName)""".stripMargin + else + code"""|${Update.of(rowType)}( + | $sql + |).updateManyWithGeneratedKeys[$rowType](${dbNames(cols, isRead = false)})(unsaved)""" + + case RepoMethod.UpsertStreaming(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } + val tempTablename = s"${relName.name}_TEMP" + + val streamingInsert = { + val sql = SQL(code"copy $tempTablename(${dbNames(cols, isRead = false)}) from stdin") + if (fixVerySlowImplicit) code"new $FragmentOps($sql).copyIn(unsaved, batchSize)(using ${textSupport.get.lookupTextFor(rowType)})" + else code"new $FragmentOps($sql).copyIn[$rowType](unsaved, batchSize)" + } + + val mergeSql = SQL { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |select * from $tempTablename + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |; + |drop table $tempTablename;""".stripMargin + } + + code"""|for { + | _ <- ${SQL(code"create temporary table $tempTablename (like $relName) on commit drop")}.update.run + | _ <- $streamingInsert + | res <- $mergeSql.update.run + |} yield res""".stripMargin + case RepoMethod.Insert(relName, cols, unsavedParam, rowType) => val values = cols.map { c => code"${runtimeInterpolateValue(code"${unsavedParam.name}.${c.name}", c.tpe)}${SqlCast.toPgCode(c)}" @@ -405,8 +469,7 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef | ${x.idsParam.name}.view.flatMap(id => byId.get(id).map(x => (id, x))).toMap |}""".stripMargin case RepoMethod.SelectByUnique(_, keyColumns, _, _) => - code"${delayCIO}(map.values.find(v => ${keyColumns.map(c => code"${c.name} == v.${c.name}").mkCode(" && ")}))" - + code"$delayCIO(map.values.find(v => ${keyColumns.map(c => code"${c.name} == v.${c.name}").mkCode(" && ")}))" case RepoMethod.SelectByFieldValues(_, cols, fieldValue, fieldValueOrIdsParam, _) => val cases = cols.map { col => code"case (acc, $fieldValue.${col.name}(value)) => acc.filter(_.${col.name} == value)" @@ -462,6 +525,22 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef | map.put(${unsavedParam.name}.${id.paramName}, ${unsavedParam.name}): @${TypesScala.nowarn} | ${unsavedParam.name} |}""".stripMargin + case RepoMethod.UpsertStreaming(_, _, _, _) => + code"""|unsaved.compile.toList.map { rows => + | var num = 0 + | rows.foreach { row => + | map += (row.${id.paramName} -> row) + | num += 1 + | } + | num + |}""".stripMargin + case RepoMethod.UpsertBatch(_, _, _, _) => + code"""|$fs2Stream.emits { + | unsaved.map { row => + | map += (row.${id.paramName} -> row) + | row + | } + |}""".stripMargin case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, _) => 
code"insert(${maybeToRow.get.name}(${unsavedParam.name}))" case RepoMethod.InsertStreaming(_, _, _) => @@ -715,9 +794,60 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef sc.Given(tparams = Nil, name = readName, implicitParams = Nil, tpe = Read.of(tpe), body = body) } + + val write = { + val puts = { + val all = cols.map { c => + c.tpe match { + case TypesScala.Optional(underlying) => code"(${lookupPutFor(underlying)}, $Nullability.Nullable)" + case other => code"(${lookupPutFor(other)}, $Nullability.NoNulls)" + } + } + code"${TypesScala.List}(${all.mkCode(",\n")})" + } + + val toList = { + val all = cols.map(c => code"x.${c.name}") + code"x => ${TypesScala.List}(${all.mkCode(", ")})" + } + val unsafeSet = { + val all = cols.zipWithIndex.map { case (c, i) => + c.tpe match { + case TypesScala.Optional(underlying) => code"${lookupPutFor(underlying)}.unsafeSetNullable(rs, i + $i, a.${c.name})" + case other => code"${lookupPutFor(other)}.unsafeSetNonNullable(rs, i + $i, a.${c.name})" + } + } + code"""|(rs, i, a) => { + | ${all.mkCode("\n")} + |}""".stripMargin + } + + val unsafeUpdate = { + val all = cols.zipWithIndex.map { case (c, i) => + c.tpe match { + case TypesScala.Optional(underlying) => code"${lookupPutFor(underlying)}.unsafeUpdateNullable(ps, i + $i, a.${c.name})" + case other => code"${lookupPutFor(other)}.unsafeUpdateNonNullable(ps, i + $i, a.${c.name})" + } + } + code"""|(ps, i, a) => { + | ${all.mkCode("\n")} + |}""".stripMargin + } + + val body = + code"""|new ${Write.of(tpe)}( + | puts = $puts, + | toList = $toList, + | unsafeSet = $unsafeSet, + | unsafeUpdate = $unsafeUpdate + |) + |""".stripMargin + + sc.Given(tparams = Nil, name = writeName, implicitParams = Nil, tpe = Write.of(tpe), body = body) + } rowType match { case DbLib.RowType.Writable => text.toList - case DbLib.RowType.ReadWriteable => List(read) ++ text + case DbLib.RowType.ReadWriteable => List(read, write) ++ text case DbLib.RowType.Readable => List(read) } } diff --git a/typo/src/scala/typo/internal/codegen/DbLibZioJdbc.scala b/typo/src/scala/typo/internal/codegen/DbLibZioJdbc.scala index 1f9594d72..3fc4a1d14 100644 --- a/typo/src/scala/typo/internal/codegen/DbLibZioJdbc.scala +++ b/typo/src/scala/typo/internal/codegen/DbLibZioJdbc.scala @@ -178,69 +178,78 @@ class DbLibZioJdbc(pkg: sc.QIdent, inlineImplicits: Boolean, dslEnabled: Boolean code"${composite.cols.map(cc => code"${cc.dbName} = ${runtimeInterpolateValue(code"${composite.paramName}.${cc.name}", cc.tpe)}").mkCode(" AND ")}" } - override def repoSig(repoMethod: RepoMethod): sc.Code = { + override def repoSig(repoMethod: RepoMethod): Either[DbLib.NotImplementedFor, sc.Code] = { val name = repoMethod.methodName repoMethod match { case RepoMethod.SelectBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}") case RepoMethod.SelectAll(_, _, rowType) => - code"def $name: ${ZStream.of(ZConnection, Throwable, rowType)}" + Right(code"def $name: ${ZStream.of(ZConnection, Throwable, rowType)}") case RepoMethod.SelectById(_, _, id, rowType) => - code"def $name(${id.param}): ${ZIO.of(ZConnection, Throwable, TypesScala.Option.of(rowType))}" + Right(code"def $name(${id.param}): ${ZIO.of(ZConnection, Throwable, TypesScala.Option.of(rowType))}") case RepoMethod.SelectByIds(_, _, idComputed, idsParam, rowType) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => 
sc.Param(sc.Ident(s"encoder$i"), JdbcEncoder.of(sc.Type.ArrayOf(colType)), None) } usedDefineds match { case Nil => - code"def $name($idsParam): ${ZStream.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($idsParam): ${ZStream.of(ZConnection, Throwable, rowType)}") case nonEmpty => - code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ZStream.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ZStream.of(ZConnection, Throwable, rowType)}") } case RepoMethod.SelectByIdsTracked(x) => val usedDefineds = x.idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"encoder$i"), JdbcEncoder.of(sc.Type.ArrayOf(colType)), None) } val returnType = ZIO.of(ZConnection, Throwable, TypesScala.Map.of(x.idComputed.tpe, x.rowType)) usedDefineds match { case Nil => - code"def $name(${x.idsParam}): $returnType" + Right(code"def $name(${x.idsParam}): $returnType") case nonEmpty => - code"def $name(${x.idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): $returnType" + Right(code"def $name(${x.idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): $returnType") } case RepoMethod.SelectByUnique(_, keyColumns, _, rowType) => - code"def $name(${keyColumns.map(_.param.code).mkCode(", ")}): ${ZIO.of(ZConnection, Throwable, TypesScala.Option.of(rowType))}" + Right(code"def $name(${keyColumns.map(_.param.code).mkCode(", ")}): ${ZIO.of(ZConnection, Throwable, TypesScala.Option.of(rowType))}") case RepoMethod.SelectByFieldValues(_, _, _, fieldValueOrIdsParam, rowType) => - code"def $name($fieldValueOrIdsParam): ${ZStream.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($fieldValueOrIdsParam): ${ZStream.of(ZConnection, Throwable, rowType)}") case RepoMethod.UpdateBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}") case RepoMethod.UpdateFieldValues(_, id, varargs, _, _, _) => - code"def $name(${id.param}, $varargs): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}" + Right(code"def $name(${id.param}, $varargs): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}") case RepoMethod.Update(_, _, _, param, _) => - code"def $name($param): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}" + Right(code"def $name($param): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}") case RepoMethod.Insert(_, _, unsavedParam, rowType) => - code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, rowType)}") case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, rowType) => - code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, rowType)}") case RepoMethod.InsertStreaming(_, _, rowType) => val in = ZStream.of(ZConnection, TypesJava.Throwable, rowType) val out = ZIO.of(ZConnection, TypesJava.Throwable, TypesScala.Long) - code"def $name(unsaved: $in, batchSize: Int = 10000): $out" + Right(code"def $name(unsaved: $in, batchSize: Int = 10000): $out") + case RepoMethod.UpsertBatch(_, _, _, _) => +// val in = TypesScala.List.of(rowType) +// val out = ZIO.of(ZConnection, TypesJava.Throwable, TypesScala.List.of(rowType)) +// Right(code"def $name(unsaved: $in): $out") + Left(DbLib.NotImplementedFor("zio-jdbc")) + case RepoMethod.UpsertStreaming(_, _, _, rowType) => 
+ val in = ZStream.of(ZConnection, TypesJava.Throwable, rowType) + val out = ZIO.of(ZConnection, TypesJava.Throwable, TypesScala.Long) + Right(code"def $name(unsaved: $in, batchSize: Int = 10000): $out") case RepoMethod.Upsert(_, _, _, unsavedParam, rowType) => - code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, UpdateResult.of(rowType))}" + Right(code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, UpdateResult.of(rowType))}") case RepoMethod.InsertUnsavedStreaming(_, unsaved) => val in = ZStream.of(ZConnection, TypesJava.Throwable, unsaved.tpe) val out = ZIO.of(ZConnection, TypesJava.Throwable, TypesScala.Long) - code"def $name(unsaved: $in, batchSize: ${TypesScala.Int} = 10000): $out" + Right(code"def $name(unsaved: $in, batchSize: ${TypesScala.Int} = 10000): $out") case RepoMethod.DeleteBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}") case RepoMethod.Delete(_, id) => - code"def $name(${id.param}): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}" + Right(code"def $name(${id.param}): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}") case RepoMethod.DeleteByIds(_, idComputed, idsParam) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"encoder$i"), JdbcEncoder.of(sc.Type.ArrayOf(colType)), None) } usedDefineds match { case Nil => - code"def $name(${idsParam}): ${ZIO.of(ZConnection, Throwable, TypesScala.Long)}" + Right(code"def $name(${idsParam}): ${ZIO.of(ZConnection, Throwable, TypesScala.Long)}") case nonEmpty => - code"def $name(${idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ZIO.of(ZConnection, Throwable, TypesScala.Long)}" + Right(code"def $name(${idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ZIO.of(ZConnection, Throwable, TypesScala.Long)}") } case RepoMethod.SqlFile(sqlScript) => val params = sc.Params(sqlScript.params.map(p => sc.Param(p.name, p.tpe, None))) @@ -250,7 +259,7 @@ class DbLibZioJdbc(pkg: sc.QIdent, inlineImplicits: Boolean, dslEnabled: Boolean case MaybeReturnsRows.Update => ZIO.of(ZConnection, Throwable, TypesScala.Long) } - code"def $name$params: $retType" + Right(code"def $name$params: $retType") } } @@ -394,36 +403,50 @@ class DbLibZioJdbc(pkg: sc.QIdent, inlineImplicits: Boolean, dslEnabled: Boolean code"${runtimeInterpolateValue(code"${unsavedParam.name}.${c.name}", c.tpe)}${SqlCast.toPgCode(c)}" } - val pickExcludedCols = cols.toList - .filterNot(c => id.cols.exists(_.name == c.name)) - .map { c => code"${c.dbName} = EXCLUDED.${c.dbName}" } + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } - val base: sc.Code = + val sql = SQL { code"""|insert into $relName(${dbNames(cols, isRead = false)}) |values ( | ${values.mkCode(",\n")} |) - |on conflict (${dbNames(id.cols, isRead = false)})""".stripMargin - - val exclusion: Option[sc.Code] = - if (pickExcludedCols.isEmpty) None - else - Some { - code"""|do update set - | ${pickExcludedCols.mkCode(",\n")}""".stripMargin - } + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |returning ${dbNames(cols, isRead = true)}""".stripMargin + } - val returning: sc.Code = code"returning ${dbNames(cols, isRead = true)}" + 
code"$sql.insertReturning(using ${lookupJdbcDecoder(rowType)})" - val sql = SQL { - List( - Some(base), - exclusion, - Some(returning) - ).flatten.mkCode("\n") + case RepoMethod.UpsertBatch(_, _, _, _) => + "???" + case RepoMethod.UpsertStreaming(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin } + val tempTablename = s"${relName.name}_TEMP" - code"$sql.insertReturning(using ${lookupJdbcDecoder(rowType)})" + val copySql = sc.s(code"copy $tempTablename(${dbNames(cols, isRead = false)}) from stdin") + + val mergeSql = SQL { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |select * from $tempTablename + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |; + |drop table $tempTablename;""".stripMargin + } + code"""|val created = ${SQL(code"create temporary table $tempTablename (like $relName) on commit drop")}.execute + |val copied = ${textSupport.get.streamingInsert}($copySql, batchSize, unsaved)(${textSupport.get.lookupTextFor(rowType)}) + |val merged = $mergeSql.update + |created *> copied *> merged""".stripMargin case RepoMethod.Insert(relName, cols, unsavedParam, rowType) => val values = cols.map { c => @@ -579,6 +602,20 @@ class DbLibZioJdbc(pkg: sc.QIdent, inlineImplicits: Boolean, dslEnabled: Boolean | map.put(${unsavedParam.name}.${id.paramName}, ${unsavedParam.name}): @${TypesScala.nowarn} | $UpdateResult(1, $Chunk.single(${unsavedParam.name})) |}""".stripMargin + case RepoMethod.UpsertBatch(_, _, id, _) => + code"""|ZIO.succeed { + | unsaved.map{ row => + | map += (row.${id.paramName} -> row) + | row + | } + |}""".stripMargin + case RepoMethod.UpsertStreaming(_, _, _, _) => + code"""|unsaved.scanZIO(0L) { case (acc, row) => + | ZIO.succeed { + | map += (row.${id.paramName} -> row) + | acc + 1 + | } + |}.runLast.map(_.getOrElse(0L))""".stripMargin case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, _) => code"insert(${maybeToRow.get.name}(${unsavedParam.name}))" diff --git a/typo/src/scala/typo/internal/codegen/FilesRelation.scala b/typo/src/scala/typo/internal/codegen/FilesRelation.scala index 4bf3acc72..55e571ae4 100644 --- a/typo/src/scala/typo/internal/codegen/FilesRelation.scala +++ b/typo/src/scala/typo/internal/codegen/FilesRelation.scala @@ -297,7 +297,10 @@ case class FilesRelation( def RepoTraitFile(dbLib: DbLib, repoMethods: NonEmptyList[RepoMethod]): sc.File = { val renderedMethods = repoMethods.map { repoMethod => - code"${repoMethod.comment.fold("")(c => c + "\n")}${dbLib.repoSig(repoMethod)}" + dbLib.repoSig(repoMethod) match { + case Left(DbLib.NotImplementedFor(lib)) => code"// Not implementable for $lib: ${repoMethod.methodName}" + case Right(sig) => code"${repoMethod.comment.fold("")(c => c + "\n")}$sig" + } } val str = code"""trait ${names.RepoName.name} { @@ -309,10 +312,12 @@ case class FilesRelation( } def RepoImplFile(dbLib: DbLib, repoMethods: NonEmptyList[RepoMethod]): sc.File = { - val renderedMethods: NonEmptyList[sc.Code] = repoMethods.map { repoMethod => - code"""|${repoMethod.comment.fold("")(c => c + "\n")}override ${dbLib.repoSig(repoMethod)} = { - | ${dbLib.repoImpl(repoMethod)} - |}""".stripMargin + val renderedMethods: List[sc.Code] = repoMethods.toList.flatMap { repoMethod => + dbLib.repoSig(repoMethod).toOption.map { sig => + 
code"""|${repoMethod.comment.fold("")(c => c + "\n")}override $sig = { + | ${dbLib.repoImpl(repoMethod)} + |}""".stripMargin + } } val str = code"""|class ${names.RepoImplName.name} extends ${names.RepoName} { @@ -329,11 +334,13 @@ case class FilesRelation( sc.Param(sc.Ident("toRow"), TypesScala.Function1.of(unsaved.tpe, names.RowName), None) } - val methods: NonEmptyList[sc.Code] = - repoMethods.map { repoMethod => - code"""|${repoMethod.comment.fold("")(c => c + "\n")}override ${dbLib.repoSig(repoMethod)} = { - | ${dbLib.mockRepoImpl(idComputed, repoMethod, maybeToRowParam)} - |}""".stripMargin + val methods: List[sc.Code] = + repoMethods.toList.flatMap { repoMethod => + dbLib.repoSig(repoMethod).toOption.map { sig => + code"""|${repoMethod.comment.fold("")(c => c + "\n")}override $sig = { + | ${dbLib.mockRepoImpl(idComputed, repoMethod, maybeToRowParam)} + |}""".stripMargin + } } val classParams = List( diff --git a/typo/src/scala/typo/sc.scala b/typo/src/scala/typo/sc.scala index 5463e7d5d..0b68e08e4 100644 --- a/typo/src/scala/typo/sc.scala +++ b/typo/src/scala/typo/sc.scala @@ -140,6 +140,7 @@ object sc { TypesScala.Float, TypesScala.Function1, TypesScala.Int, + TypesScala.Iterable, TypesScala.Iterator, TypesJava.Character, TypesJava.Integer,