From 098a1bc6f4d9e2d7fc1411d1a9ce95998a0c4838 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=98yvind=20Raddum=20Berg?= Date: Sat, 13 Jul 2024 17:12:29 +0200 Subject: [PATCH] Add `upsertStreaming` and `upsertBatch` (fix #115) (#117) * Add `upsertStreaming` (fix #115) * Add `upsertBatch` repo method. caveats: - anorm didn't support returning rows for batch queries, so it's monkey-patched in into the `anorm` package - zio-jdbc cannot express batch updates at all. For this reason it was necessary to add support for not implementing a repo method for a dblib. * fix upsert for tables where all columns appear in id --- .../hardcoded/ExecuteReturningSyntax.scala | 29 +++ .../compositepk/person/PersonRepo.scala | 3 + .../compositepk/person/PersonRepoImpl.scala | 38 +++ .../compositepk/person/PersonRepoMock.scala | 13 ++ .../football_club/FootballClubRepo.scala | 3 + .../football_club/FootballClubRepoImpl.scala | 37 +++ .../football_club/FootballClubRepoMock.scala | 13 ++ .../marital_status/MaritalStatusRepo.scala | 3 + .../MaritalStatusRepoImpl.scala | 37 ++- .../MaritalStatusRepoMock.scala | 13 ++ .../myschema/person/PersonRepo.scala | 3 + .../myschema/person/PersonRepoImpl.scala | 67 ++++++ .../myschema/person/PersonRepoMock.scala | 13 ++ .../hardcoded/ExecuteReturningSyntax.scala | 29 +++ .../compositepk/person/PersonRepo.scala | 3 + .../compositepk/person/PersonRepoImpl.scala | 38 +++ .../compositepk/person/PersonRepoMock.scala | 13 ++ .../football_club/FootballClubRepo.scala | 3 + .../football_club/FootballClubRepoImpl.scala | 37 +++ .../football_club/FootballClubRepoMock.scala | 13 ++ .../marital_status/MaritalStatusRepo.scala | 3 + .../MaritalStatusRepoImpl.scala | 37 ++- .../MaritalStatusRepoMock.scala | 13 ++ .../myschema/person/PersonRepo.scala | 3 + .../myschema/person/PersonRepoImpl.scala | 67 ++++++ .../myschema/person/PersonRepoMock.scala | 13 ++ .../compositepk/person/PersonRepo.scala | 3 + .../compositepk/person/PersonRepoImpl.scala | 27 +++ .../compositepk/person/PersonRepoMock.scala | 19 ++ .../compositepk/person/PersonRow.scala | 17 ++ .../football_club/FootballClubRepo.scala | 3 + .../football_club/FootballClubRepoImpl.scala | 27 +++ .../football_club/FootballClubRepoMock.scala | 19 ++ .../football_club/FootballClubRow.scala | 14 ++ .../marital_status/MaritalStatusRepo.scala | 3 + .../MaritalStatusRepoImpl.scala | 28 ++- .../MaritalStatusRepoMock.scala | 19 ++ .../marital_status/MaritalStatusRow.scala | 11 + .../myschema/person/PersonRepo.scala | 3 + .../myschema/person/PersonRepoImpl.scala | 47 ++++ .../myschema/person/PersonRepoMock.scala | 19 ++ .../hardcoded/myschema/person/PersonRow.scala | 44 ++++ .../compositepk/person/PersonRepo.scala | 3 + .../compositepk/person/PersonRepoImpl.scala | 27 +++ .../compositepk/person/PersonRepoMock.scala | 19 ++ .../compositepk/person/PersonRow.scala | 17 ++ .../football_club/FootballClubRepo.scala | 3 + .../football_club/FootballClubRepoImpl.scala | 27 +++ .../football_club/FootballClubRepoMock.scala | 19 ++ .../football_club/FootballClubRow.scala | 14 ++ .../marital_status/MaritalStatusRepo.scala | 3 + .../MaritalStatusRepoImpl.scala | 28 ++- .../MaritalStatusRepoMock.scala | 19 ++ .../marital_status/MaritalStatusRow.scala | 11 + .../myschema/person/PersonRepo.scala | 3 + .../myschema/person/PersonRepoImpl.scala | 47 ++++ .../myschema/person/PersonRepoMock.scala | 19 ++ .../hardcoded/myschema/person/PersonRow.scala | 44 ++++ .../compositepk/person/PersonRepo.scala | 3 + .../compositepk/person/PersonRepoImpl.scala | 13 ++ 
.../compositepk/person/PersonRepoMock.scala | 9 + .../football_club/FootballClubRepo.scala | 3 + .../football_club/FootballClubRepoImpl.scala | 13 ++ .../football_club/FootballClubRepoMock.scala | 9 + .../marital_status/MaritalStatusRepo.scala | 3 + .../MaritalStatusRepoImpl.scala | 13 ++ .../MaritalStatusRepoMock.scala | 9 + .../myschema/person/PersonRepo.scala | 3 + .../myschema/person/PersonRepoImpl.scala | 23 ++ .../myschema/person/PersonRepoMock.scala | 9 + .../compositepk/person/PersonRepo.scala | 3 + .../compositepk/person/PersonRepoImpl.scala | 13 ++ .../compositepk/person/PersonRepoMock.scala | 9 + .../football_club/FootballClubRepo.scala | 3 + .../football_club/FootballClubRepoImpl.scala | 13 ++ .../football_club/FootballClubRepoMock.scala | 9 + .../marital_status/MaritalStatusRepo.scala | 3 + .../MaritalStatusRepoImpl.scala | 13 ++ .../MaritalStatusRepoMock.scala | 9 + .../myschema/person/PersonRepo.scala | 3 + .../myschema/person/PersonRepoImpl.scala | 23 ++ .../myschema/person/PersonRepoMock.scala | 9 + site-in/other-features/testing-with-stubs.md | 12 + .../department/DepartmentRepo.scala | 3 + .../department/DepartmentRepoImpl.scala | 43 ++++ .../department/DepartmentRepoMock.scala | 13 ++ .../employee/EmployeeRepo.scala | 3 + .../employee/EmployeeRepoImpl.scala | 76 ++++++ .../employee/EmployeeRepoMock.scala | 13 ++ .../EmployeedepartmenthistoryRepo.scala | 3 + .../EmployeedepartmenthistoryRepoImpl.scala | 43 ++++ .../EmployeedepartmenthistoryRepoMock.scala | 13 ++ .../EmployeepayhistoryRepo.scala | 3 + .../EmployeepayhistoryRepoImpl.scala | 44 ++++ .../EmployeepayhistoryRepoMock.scala | 13 ++ .../jobcandidate/JobcandidateRepo.scala | 3 + .../jobcandidate/JobcandidateRepoImpl.scala | 43 ++++ .../jobcandidate/JobcandidateRepoMock.scala | 13 ++ .../humanresources/shift/ShiftRepo.scala | 3 + .../humanresources/shift/ShiftRepoImpl.scala | 46 ++++ .../humanresources/shift/ShiftRepoMock.scala | 13 ++ .../information_schema/CardinalNumber.scala | 38 +++ .../information_schema/CharacterData.scala | 38 +++ .../information_schema/SqlIdentifier.scala | 38 +++ .../information_schema/TimeStamp.scala | 39 ++++ .../information_schema/YesOrNo.scala | 38 +++ .../person/address/AddressRepo.scala | 3 + .../person/address/AddressRepoImpl.scala | 58 +++++ .../person/address/AddressRepoMock.scala | 13 ++ .../person/addresstype/AddresstypeRepo.scala | 3 + .../addresstype/AddresstypeRepoImpl.scala | 43 ++++ .../addresstype/AddresstypeRepoMock.scala | 13 ++ .../businessentity/BusinessentityRepo.scala | 3 + .../BusinessentityRepoImpl.scala | 40 ++++ .../BusinessentityRepoMock.scala | 13 ++ .../BusinessentityaddressRepo.scala | 3 + .../BusinessentityaddressRepoImpl.scala | 42 ++++ .../BusinessentityaddressRepoMock.scala | 13 ++ .../BusinessentitycontactRepo.scala | 3 + .../BusinessentitycontactRepoImpl.scala | 42 ++++ .../BusinessentitycontactRepoMock.scala | 13 ++ .../person/contacttype/ContacttypeRepo.scala | 3 + .../contacttype/ContacttypeRepoImpl.scala | 40 ++++ .../contacttype/ContacttypeRepoMock.scala | 13 ++ .../countryregion/CountryregionRepo.scala | 3 + .../countryregion/CountryregionRepoImpl.scala | 40 ++++ .../countryregion/CountryregionRepoMock.scala | 13 ++ .../emailaddress/EmailaddressRepo.scala | 3 + .../emailaddress/EmailaddressRepoImpl.scala | 44 ++++ .../emailaddress/EmailaddressRepoMock.scala | 13 ++ .../person/password/PasswordRepo.scala | 3 + .../person/password/PasswordRepoImpl.scala | 46 ++++ .../person/password/PasswordRepoMock.scala | 13 ++ .../person/person/PersonRepo.scala 
| 3 + .../person/person/PersonRepoImpl.scala | 70 ++++++ .../person/person/PersonRepoMock.scala | 13 ++ .../person/personphone/PersonphoneRepo.scala | 3 + .../personphone/PersonphoneRepoImpl.scala | 39 ++++ .../personphone/PersonphoneRepoMock.scala | 13 ++ .../phonenumbertype/PhonenumbertypeRepo.scala | 3 + .../PhonenumbertypeRepoImpl.scala | 40 ++++ .../PhonenumbertypeRepoMock.scala | 13 ++ .../stateprovince/StateprovinceRepo.scala | 3 + .../stateprovince/StateprovinceRepoImpl.scala | 55 +++++ .../stateprovince/StateprovinceRepoMock.scala | 13 ++ .../billofmaterials/BillofmaterialsRepo.scala | 3 + .../BillofmaterialsRepoImpl.scala | 58 +++++ .../BillofmaterialsRepoMock.scala | 13 ++ .../production/culture/CultureRepo.scala | 3 + .../production/culture/CultureRepoImpl.scala | 40 ++++ .../production/culture/CultureRepoMock.scala | 13 ++ .../production/document/DocumentRepo.scala | 3 + .../document/DocumentRepoImpl.scala | 70 ++++++ .../document/DocumentRepoMock.scala | 13 ++ .../illustration/IllustrationRepo.scala | 3 + .../illustration/IllustrationRepoImpl.scala | 40 ++++ .../illustration/IllustrationRepoMock.scala | 13 ++ .../production/location/LocationRepo.scala | 3 + .../location/LocationRepoImpl.scala | 46 ++++ .../location/LocationRepoMock.scala | 13 ++ .../production/product/ProductRepo.scala | 3 + .../production/product/ProductRepoImpl.scala | 106 +++++++++ .../production/product/ProductRepoMock.scala | 13 ++ .../productcategory/ProductcategoryRepo.scala | 3 + .../ProductcategoryRepoImpl.scala | 43 ++++ .../ProductcategoryRepoMock.scala | 13 ++ .../ProductcosthistoryRepo.scala | 3 + .../ProductcosthistoryRepoImpl.scala | 44 ++++ .../ProductcosthistoryRepoMock.scala | 13 ++ .../ProductdescriptionRepo.scala | 3 + .../ProductdescriptionRepoImpl.scala | 43 ++++ .../ProductdescriptionRepoMock.scala | 13 ++ .../productdocument/ProductdocumentRepo.scala | 3 + .../ProductdocumentRepoImpl.scala | 38 +++ .../ProductdocumentRepoMock.scala | 13 ++ .../ProductinventoryRepo.scala | 3 + .../ProductinventoryRepoImpl.scala | 50 ++++ .../ProductinventoryRepoMock.scala | 13 ++ .../ProductlistpricehistoryRepo.scala | 3 + .../ProductlistpricehistoryRepoImpl.scala | 44 ++++ .../ProductlistpricehistoryRepoMock.scala | 13 ++ .../productmodel/ProductmodelRepo.scala | 3 + .../productmodel/ProductmodelRepoImpl.scala | 49 ++++ .../productmodel/ProductmodelRepoMock.scala | 13 ++ .../ProductmodelillustrationRepo.scala | 3 + .../ProductmodelillustrationRepoImpl.scala | 38 +++ .../ProductmodelillustrationRepoMock.scala | 13 ++ ...ctmodelproductdescriptioncultureRepo.scala | 3 + ...delproductdescriptioncultureRepoImpl.scala | 39 ++++ ...delproductdescriptioncultureRepoMock.scala | 13 ++ .../productphoto/ProductphotoRepo.scala | 3 + .../productphoto/ProductphotoRepoImpl.scala | 49 ++++ .../productphoto/ProductphotoRepoMock.scala | 13 ++ .../ProductproductphotoRepo.scala | 3 + .../ProductproductphotoRepoImpl.scala | 41 ++++ .../ProductproductphotoRepoMock.scala | 13 ++ .../productreview/ProductreviewRepo.scala | 3 + .../productreview/ProductreviewRepoImpl.scala | 55 +++++ .../productreview/ProductreviewRepoMock.scala | 13 ++ .../ProductsubcategoryRepo.scala | 3 + .../ProductsubcategoryRepoImpl.scala | 46 ++++ .../ProductsubcategoryRepoMock.scala | 13 ++ .../scrapreason/ScrapreasonRepo.scala | 3 + .../scrapreason/ScrapreasonRepoImpl.scala | 40 ++++ .../scrapreason/ScrapreasonRepoMock.scala | 13 ++ .../TransactionhistoryRepo.scala | 3 + .../TransactionhistoryRepoImpl.scala | 58 +++++ 
.../TransactionhistoryRepoMock.scala | 13 ++ .../TransactionhistoryarchiveRepo.scala | 3 + .../TransactionhistoryarchiveRepoImpl.scala | 58 +++++ .../TransactionhistoryarchiveRepoMock.scala | 13 ++ .../unitmeasure/UnitmeasureRepo.scala | 3 + .../unitmeasure/UnitmeasureRepoImpl.scala | 40 ++++ .../unitmeasure/UnitmeasureRepoMock.scala | 13 ++ .../production/workorder/WorkorderRepo.scala | 3 + .../workorder/WorkorderRepoImpl.scala | 58 +++++ .../workorder/WorkorderRepoMock.scala | 13 ++ .../WorkorderroutingRepo.scala | 3 + .../WorkorderroutingRepoImpl.scala | 63 +++++ .../WorkorderroutingRepoMock.scala | 13 ++ .../public/flaff/FlaffRepo.scala | 3 + .../public/flaff/FlaffRepoImpl.scala | 41 ++++ .../public/flaff/FlaffRepoMock.scala | 13 ++ .../identity_test/IdentityTestRepo.scala | 3 + .../identity_test/IdentityTestRepoImpl.scala | 40 ++++ .../identity_test/IdentityTestRepoMock.scala | 13 ++ .../public/users/UsersRepo.scala | 3 + .../public/users/UsersRepoImpl.scala | 52 +++++ .../public/users/UsersRepoMock.scala | 13 ++ .../productvendor/ProductvendorRepo.scala | 3 + .../productvendor/ProductvendorRepoImpl.scala | 62 +++++ .../productvendor/ProductvendorRepoMock.scala | 13 ++ .../PurchaseorderheaderRepo.scala | 3 + .../PurchaseorderheaderRepoImpl.scala | 67 ++++++ .../PurchaseorderheaderRepoMock.scala | 13 ++ .../shipmethod/ShipmethodRepo.scala | 3 + .../shipmethod/ShipmethodRepoImpl.scala | 49 ++++ .../shipmethod/ShipmethodRepoMock.scala | 13 ++ .../purchasing/vendor/VendorRepo.scala | 3 + .../purchasing/vendor/VendorRepoImpl.scala | 55 +++++ .../purchasing/vendor/VendorRepoMock.scala | 13 ++ .../CountryregioncurrencyRepo.scala | 3 + .../CountryregioncurrencyRepoImpl.scala | 38 +++ .../CountryregioncurrencyRepoMock.scala | 13 ++ .../sales/creditcard/CreditcardRepo.scala | 3 + .../sales/creditcard/CreditcardRepoImpl.scala | 49 ++++ .../sales/creditcard/CreditcardRepoMock.scala | 13 ++ .../sales/currency/CurrencyRepo.scala | 3 + .../sales/currency/CurrencyRepoImpl.scala | 40 ++++ .../sales/currency/CurrencyRepoMock.scala | 13 ++ .../sales/currencyrate/CurrencyrateRepo.scala | 3 + .../currencyrate/CurrencyrateRepoImpl.scala | 52 +++++ .../currencyrate/CurrencyrateRepoMock.scala | 13 ++ .../sales/customer/CustomerRepo.scala | 3 + .../sales/customer/CustomerRepoImpl.scala | 49 ++++ .../sales/customer/CustomerRepoMock.scala | 13 ++ .../PersoncreditcardRepo.scala | 3 + .../PersoncreditcardRepoImpl.scala | 38 +++ .../PersoncreditcardRepoMock.scala | 13 ++ .../SalesorderdetailRepo.scala | 3 + .../SalesorderdetailRepoImpl.scala | 59 +++++ .../SalesorderdetailRepoMock.scala | 13 ++ .../SalesorderheaderRepo.scala | 3 + .../SalesorderheaderRepoImpl.scala | 106 +++++++++ .../SalesorderheaderRepoMock.scala | 13 ++ .../SalesorderheadersalesreasonRepo.scala | 3 + .../SalesorderheadersalesreasonRepoImpl.scala | 38 +++ .../SalesorderheadersalesreasonRepoMock.scala | 13 ++ .../sales/salesperson/SalespersonRepo.scala | 3 + .../salesperson/SalespersonRepoImpl.scala | 58 +++++ .../salesperson/SalespersonRepoMock.scala | 13 ++ .../SalespersonquotahistoryRepo.scala | 3 + .../SalespersonquotahistoryRepoImpl.scala | 44 ++++ .../SalespersonquotahistoryRepoMock.scala | 13 ++ .../sales/salesreason/SalesreasonRepo.scala | 3 + .../salesreason/SalesreasonRepoImpl.scala | 43 ++++ .../salesreason/SalesreasonRepoMock.scala | 13 ++ .../sales/salestaxrate/SalestaxrateRepo.scala | 3 + .../salestaxrate/SalestaxrateRepoImpl.scala | 52 +++++ .../salestaxrate/SalestaxrateRepoMock.scala | 13 ++ 
.../salesterritory/SalesterritoryRepo.scala | 3 + .../SalesterritoryRepoImpl.scala | 61 +++++ .../SalesterritoryRepoMock.scala | 13 ++ .../SalesterritoryhistoryRepo.scala | 3 + .../SalesterritoryhistoryRepoImpl.scala | 45 ++++ .../SalesterritoryhistoryRepoMock.scala | 13 ++ .../ShoppingcartitemRepo.scala | 3 + .../ShoppingcartitemRepoImpl.scala | 49 ++++ .../ShoppingcartitemRepoMock.scala | 13 ++ .../sales/specialoffer/SpecialofferRepo.scala | 3 + .../specialoffer/SpecialofferRepoImpl.scala | 64 +++++ .../specialoffer/SpecialofferRepoMock.scala | 13 ++ .../SpecialofferproductRepo.scala | 3 + .../SpecialofferproductRepoImpl.scala | 41 ++++ .../SpecialofferproductRepoMock.scala | 13 ++ .../sales/store/StoreRepo.scala | 3 + .../sales/store/StoreRepoImpl.scala | 49 ++++ .../sales/store/StoreRepoMock.scala | 13 ++ .../ExecuteReturningSyntax.scala | 28 +++ .../production/product/RepoTest.scala | 43 ++++ .../department/DepartmentRepo.scala | 3 + .../department/DepartmentRepoImpl.scala | 31 +++ .../department/DepartmentRepoMock.scala | 19 ++ .../department/DepartmentRow.scala | 20 ++ .../employee/EmployeeRepo.scala | 3 + .../employee/EmployeeRepoImpl.scala | 53 +++++ .../employee/EmployeeRepoMock.scala | 19 ++ .../humanresources/employee/EmployeeRow.scala | 53 +++++ .../EmployeedepartmenthistoryRepo.scala | 3 + .../EmployeedepartmenthistoryRepoImpl.scala | 29 +++ .../EmployeedepartmenthistoryRepoMock.scala | 19 ++ .../EmployeedepartmenthistoryRow.scala | 26 +++ .../EmployeepayhistoryRepo.scala | 3 + .../EmployeepayhistoryRepoImpl.scala | 31 +++ .../EmployeepayhistoryRepoMock.scala | 19 ++ .../EmployeepayhistoryRow.scala | 23 ++ .../jobcandidate/JobcandidateRepo.scala | 3 + .../jobcandidate/JobcandidateRepoImpl.scala | 31 +++ .../jobcandidate/JobcandidateRepoMock.scala | 19 ++ .../jobcandidate/JobcandidateRow.scala | 20 ++ .../humanresources/shift/ShiftRepo.scala | 3 + .../humanresources/shift/ShiftRepoImpl.scala | 33 +++ .../humanresources/shift/ShiftRepoMock.scala | 19 ++ .../humanresources/shift/ShiftRow.scala | 23 ++ .../information_schema/CardinalNumber.scala | 34 +++ .../information_schema/CharacterData.scala | 34 +++ .../information_schema/SqlIdentifier.scala | 34 +++ .../information_schema/TimeStamp.scala | 34 +++ .../information_schema/YesOrNo.scala | 34 +++ .../person/address/AddressRepo.scala | 3 + .../person/address/AddressRepoImpl.scala | 41 ++++ .../person/address/AddressRepoMock.scala | 19 ++ .../person/address/AddressRow.scala | 35 +++ .../person/addresstype/AddresstypeRepo.scala | 3 + .../addresstype/AddresstypeRepoImpl.scala | 31 +++ .../addresstype/AddresstypeRepoMock.scala | 19 ++ .../person/addresstype/AddresstypeRow.scala | 20 ++ .../businessentity/BusinessentityRepo.scala | 3 + .../BusinessentityRepoImpl.scala | 29 +++ .../BusinessentityRepoMock.scala | 19 ++ .../businessentity/BusinessentityRow.scala | 17 ++ .../BusinessentityaddressRepo.scala | 3 + .../BusinessentityaddressRepoImpl.scala | 29 +++ .../BusinessentityaddressRepoMock.scala | 19 ++ .../BusinessentityaddressRow.scala | 23 ++ .../BusinessentitycontactRepo.scala | 3 + .../BusinessentitycontactRepoImpl.scala | 29 +++ .../BusinessentitycontactRepoMock.scala | 19 ++ .../BusinessentitycontactRow.scala | 23 ++ .../person/contacttype/ContacttypeRepo.scala | 3 + .../contacttype/ContacttypeRepoImpl.scala | 29 +++ .../contacttype/ContacttypeRepoMock.scala | 19 ++ .../person/contacttype/ContacttypeRow.scala | 17 ++ .../countryregion/CountryregionRepo.scala | 3 + .../countryregion/CountryregionRepoImpl.scala | 29 +++ 
.../countryregion/CountryregionRepoMock.scala | 19 ++ .../countryregion/CountryregionRow.scala | 17 ++ .../emailaddress/EmailaddressRepo.scala | 3 + .../emailaddress/EmailaddressRepoImpl.scala | 31 +++ .../emailaddress/EmailaddressRepoMock.scala | 19 ++ .../person/emailaddress/EmailaddressRow.scala | 23 ++ .../person/password/PasswordRepo.scala | 3 + .../person/password/PasswordRepoImpl.scala | 33 +++ .../person/password/PasswordRepoMock.scala | 19 ++ .../person/password/PasswordRow.scala | 23 ++ .../person/person/PersonRepo.scala | 3 + .../person/person/PersonRepoImpl.scala | 49 ++++ .../person/person/PersonRepoMock.scala | 19 ++ .../person/person/PersonRow.scala | 47 ++++ .../person/personphone/PersonphoneRepo.scala | 3 + .../personphone/PersonphoneRepoImpl.scala | 27 +++ .../personphone/PersonphoneRepoMock.scala | 19 ++ .../person/personphone/PersonphoneRow.scala | 20 ++ .../phonenumbertype/PhonenumbertypeRepo.scala | 3 + .../PhonenumbertypeRepoImpl.scala | 29 +++ .../PhonenumbertypeRepoMock.scala | 19 ++ .../phonenumbertype/PhonenumbertypeRow.scala | 17 ++ .../stateprovince/StateprovinceRepo.scala | 3 + .../stateprovince/StateprovinceRepoImpl.scala | 39 ++++ .../stateprovince/StateprovinceRepoMock.scala | 19 ++ .../stateprovince/StateprovinceRow.scala | 32 +++ .../billofmaterials/BillofmaterialsRepo.scala | 3 + .../BillofmaterialsRepoImpl.scala | 41 ++++ .../BillofmaterialsRepoMock.scala | 19 ++ .../billofmaterials/BillofmaterialsRow.scala | 35 +++ .../production/culture/CultureRepo.scala | 3 + .../production/culture/CultureRepoImpl.scala | 29 +++ .../production/culture/CultureRepoMock.scala | 19 ++ .../production/culture/CultureRow.scala | 17 ++ .../production/document/DocumentRepo.scala | 3 + .../document/DocumentRepoImpl.scala | 49 ++++ .../document/DocumentRepoMock.scala | 19 ++ .../production/document/DocumentRow.scala | 47 ++++ .../illustration/IllustrationRepo.scala | 3 + .../illustration/IllustrationRepoImpl.scala | 29 +++ .../illustration/IllustrationRepoMock.scala | 19 ++ .../illustration/IllustrationRow.scala | 17 ++ .../production/location/LocationRepo.scala | 3 + .../location/LocationRepoImpl.scala | 33 +++ .../location/LocationRepoMock.scala | 19 ++ .../production/location/LocationRow.scala | 23 ++ .../production/product/ProductRepo.scala | 3 + .../production/product/ProductRepoImpl.scala | 73 ++++++ .../production/product/ProductRepoMock.scala | 19 ++ .../production/product/ProductRow.scala | 83 +++++++ .../productcategory/ProductcategoryRepo.scala | 3 + .../ProductcategoryRepoImpl.scala | 31 +++ .../ProductcategoryRepoMock.scala | 19 ++ .../productcategory/ProductcategoryRow.scala | 20 ++ .../ProductcosthistoryRepo.scala | 3 + .../ProductcosthistoryRepoImpl.scala | 31 +++ .../ProductcosthistoryRepoMock.scala | 19 ++ .../ProductcosthistoryRow.scala | 23 ++ .../ProductdescriptionRepo.scala | 3 + .../ProductdescriptionRepoImpl.scala | 31 +++ .../ProductdescriptionRepoMock.scala | 19 ++ .../ProductdescriptionRow.scala | 20 ++ .../productdocument/ProductdocumentRepo.scala | 3 + .../ProductdocumentRepoImpl.scala | 27 +++ .../ProductdocumentRepoMock.scala | 19 ++ .../productdocument/ProductdocumentRow.scala | 17 ++ .../ProductinventoryRepo.scala | 3 + .../ProductinventoryRepoImpl.scala | 35 +++ .../ProductinventoryRepoMock.scala | 19 ++ .../ProductinventoryRow.scala | 29 +++ .../ProductlistpricehistoryRepo.scala | 3 + .../ProductlistpricehistoryRepoImpl.scala | 31 +++ .../ProductlistpricehistoryRepoMock.scala | 19 ++ .../ProductlistpricehistoryRow.scala | 23 ++ 
.../productmodel/ProductmodelRepo.scala | 3 + .../productmodel/ProductmodelRepoImpl.scala | 35 +++ .../productmodel/ProductmodelRepoMock.scala | 19 ++ .../productmodel/ProductmodelRow.scala | 26 +++ .../ProductmodelillustrationRepo.scala | 3 + .../ProductmodelillustrationRepoImpl.scala | 27 +++ .../ProductmodelillustrationRepoMock.scala | 19 ++ .../ProductmodelillustrationRow.scala | 17 ++ ...ctmodelproductdescriptioncultureRepo.scala | 3 + ...delproductdescriptioncultureRepoImpl.scala | 27 +++ ...delproductdescriptioncultureRepoMock.scala | 19 ++ ...uctmodelproductdescriptioncultureRow.scala | 20 ++ .../productphoto/ProductphotoRepo.scala | 3 + .../productphoto/ProductphotoRepoImpl.scala | 35 +++ .../productphoto/ProductphotoRepoMock.scala | 19 ++ .../productphoto/ProductphotoRow.scala | 26 +++ .../ProductproductphotoRepo.scala | 3 + .../ProductproductphotoRepoImpl.scala | 29 +++ .../ProductproductphotoRepoMock.scala | 19 ++ .../ProductproductphotoRow.scala | 20 ++ .../productreview/ProductreviewRepo.scala | 3 + .../productreview/ProductreviewRepoImpl.scala | 39 ++++ .../productreview/ProductreviewRepoMock.scala | 19 ++ .../productreview/ProductreviewRow.scala | 32 +++ .../ProductsubcategoryRepo.scala | 3 + .../ProductsubcategoryRepoImpl.scala | 33 +++ .../ProductsubcategoryRepoMock.scala | 19 ++ .../ProductsubcategoryRow.scala | 23 ++ .../scrapreason/ScrapreasonRepo.scala | 3 + .../scrapreason/ScrapreasonRepoImpl.scala | 29 +++ .../scrapreason/ScrapreasonRepoMock.scala | 19 ++ .../scrapreason/ScrapreasonRow.scala | 17 ++ .../TransactionhistoryRepo.scala | 3 + .../TransactionhistoryRepoImpl.scala | 41 ++++ .../TransactionhistoryRepoMock.scala | 19 ++ .../TransactionhistoryRow.scala | 35 +++ .../TransactionhistoryarchiveRepo.scala | 3 + .../TransactionhistoryarchiveRepoImpl.scala | 41 ++++ .../TransactionhistoryarchiveRepoMock.scala | 19 ++ .../TransactionhistoryarchiveRow.scala | 35 +++ .../unitmeasure/UnitmeasureRepo.scala | 3 + .../unitmeasure/UnitmeasureRepoImpl.scala | 29 +++ .../unitmeasure/UnitmeasureRepoMock.scala | 19 ++ .../unitmeasure/UnitmeasureRow.scala | 17 ++ .../production/workorder/WorkorderRepo.scala | 3 + .../workorder/WorkorderRepoImpl.scala | 41 ++++ .../workorder/WorkorderRepoMock.scala | 19 ++ .../production/workorder/WorkorderRow.scala | 35 +++ .../WorkorderroutingRepo.scala | 3 + .../WorkorderroutingRepoImpl.scala | 43 ++++ .../WorkorderroutingRepoMock.scala | 19 ++ .../WorkorderroutingRow.scala | 44 ++++ .../public/flaff/FlaffRepo.scala | 3 + .../public/flaff/FlaffRepoImpl.scala | 27 +++ .../public/flaff/FlaffRepoMock.scala | 19 ++ .../public/flaff/FlaffRow.scala | 23 ++ .../identity_test/IdentityTestRepo.scala | 3 + .../identity_test/IdentityTestRepoImpl.scala | 29 +++ .../identity_test/IdentityTestRepoMock.scala | 19 ++ .../identity_test/IdentityTestRow.scala | 17 ++ .../public/pgtest/PgtestRow.scala | 218 ++++++++++++++++++ .../public/pgtestnull/PgtestnullRow.scala | 218 ++++++++++++++++++ .../public/users/UsersRepo.scala | 3 + .../public/users/UsersRepoImpl.scala | 37 +++ .../public/users/UsersRepoMock.scala | 19 ++ .../public/users/UsersRow.scala | 29 +++ .../productvendor/ProductvendorRepo.scala | 3 + .../productvendor/ProductvendorRepoImpl.scala | 43 ++++ .../productvendor/ProductvendorRepoMock.scala | 19 ++ .../productvendor/ProductvendorRow.scala | 41 ++++ .../PurchaseorderdetailRow.scala | 35 +++ .../PurchaseorderheaderRepo.scala | 3 + .../PurchaseorderheaderRepoImpl.scala | 47 ++++ .../PurchaseorderheaderRepoMock.scala | 19 ++ 
.../PurchaseorderheaderRow.scala | 44 ++++ .../shipmethod/ShipmethodRepo.scala | 3 + .../shipmethod/ShipmethodRepoImpl.scala | 35 +++ .../shipmethod/ShipmethodRepoMock.scala | 19 ++ .../purchasing/shipmethod/ShipmethodRow.scala | 26 +++ .../purchasing/vendor/VendorRepo.scala | 3 + .../purchasing/vendor/VendorRepoImpl.scala | 39 ++++ .../purchasing/vendor/VendorRepoMock.scala | 19 ++ .../purchasing/vendor/VendorRow.scala | 32 +++ .../CountryregioncurrencyRepo.scala | 3 + .../CountryregioncurrencyRepoImpl.scala | 27 +++ .../CountryregioncurrencyRepoMock.scala | 19 ++ .../CountryregioncurrencyRow.scala | 17 ++ .../sales/creditcard/CreditcardRepo.scala | 3 + .../sales/creditcard/CreditcardRepoImpl.scala | 35 +++ .../sales/creditcard/CreditcardRepoMock.scala | 19 ++ .../sales/creditcard/CreditcardRow.scala | 26 +++ .../sales/currency/CurrencyRepo.scala | 3 + .../sales/currency/CurrencyRepoImpl.scala | 29 +++ .../sales/currency/CurrencyRepoMock.scala | 19 ++ .../sales/currency/CurrencyRow.scala | 17 ++ .../sales/currencyrate/CurrencyrateRepo.scala | 3 + .../currencyrate/CurrencyrateRepoImpl.scala | 37 +++ .../currencyrate/CurrencyrateRepoMock.scala | 19 ++ .../sales/currencyrate/CurrencyrateRow.scala | 29 +++ .../sales/customer/CustomerRepo.scala | 3 + .../sales/customer/CustomerRepoImpl.scala | 35 +++ .../sales/customer/CustomerRepoMock.scala | 19 ++ .../sales/customer/CustomerRow.scala | 26 +++ .../PersoncreditcardRepo.scala | 3 + .../PersoncreditcardRepoImpl.scala | 27 +++ .../PersoncreditcardRepoMock.scala | 19 ++ .../PersoncreditcardRow.scala | 17 ++ .../SalesorderdetailRepo.scala | 3 + .../SalesorderdetailRepoImpl.scala | 41 ++++ .../SalesorderdetailRepoMock.scala | 19 ++ .../SalesorderdetailRow.scala | 38 +++ .../SalesorderheaderRepo.scala | 3 + .../SalesorderheaderRepoImpl.scala | 73 ++++++ .../SalesorderheaderRepoMock.scala | 19 ++ .../SalesorderheaderRow.scala | 83 +++++++ .../SalesorderheadersalesreasonRepo.scala | 3 + .../SalesorderheadersalesreasonRepoImpl.scala | 27 +++ .../SalesorderheadersalesreasonRepoMock.scala | 19 ++ .../SalesorderheadersalesreasonRow.scala | 17 ++ .../sales/salesperson/SalespersonRepo.scala | 3 + .../salesperson/SalespersonRepoImpl.scala | 41 ++++ .../salesperson/SalespersonRepoMock.scala | 19 ++ .../sales/salesperson/SalespersonRow.scala | 35 +++ .../SalespersonquotahistoryRepo.scala | 3 + .../SalespersonquotahistoryRepoImpl.scala | 31 +++ .../SalespersonquotahistoryRepoMock.scala | 19 ++ .../SalespersonquotahistoryRow.scala | 23 ++ .../sales/salesreason/SalesreasonRepo.scala | 3 + .../salesreason/SalesreasonRepoImpl.scala | 31 +++ .../salesreason/SalesreasonRepoMock.scala | 19 ++ .../sales/salesreason/SalesreasonRow.scala | 20 ++ .../sales/salestaxrate/SalestaxrateRepo.scala | 3 + .../salestaxrate/SalestaxrateRepoImpl.scala | 37 +++ .../salestaxrate/SalestaxrateRepoMock.scala | 19 ++ .../sales/salestaxrate/SalestaxrateRow.scala | 29 +++ .../salesterritory/SalesterritoryRepo.scala | 3 + .../SalesterritoryRepoImpl.scala | 43 ++++ .../SalesterritoryRepoMock.scala | 19 ++ .../salesterritory/SalesterritoryRow.scala | 38 +++ .../SalesterritoryhistoryRepo.scala | 3 + .../SalesterritoryhistoryRepoImpl.scala | 31 +++ .../SalesterritoryhistoryRepoMock.scala | 19 ++ .../SalesterritoryhistoryRow.scala | 26 +++ .../ShoppingcartitemRepo.scala | 3 + .../ShoppingcartitemRepoImpl.scala | 35 +++ .../ShoppingcartitemRepoMock.scala | 19 ++ .../ShoppingcartitemRow.scala | 26 +++ .../sales/specialoffer/SpecialofferRepo.scala | 3 + .../specialoffer/SpecialofferRepoImpl.scala | 
45 ++++ .../specialoffer/SpecialofferRepoMock.scala | 19 ++ .../sales/specialoffer/SpecialofferRow.scala | 41 ++++ .../SpecialofferproductRepo.scala | 3 + .../SpecialofferproductRepoImpl.scala | 29 +++ .../SpecialofferproductRepoMock.scala | 19 ++ .../SpecialofferproductRow.scala | 20 ++ .../sales/store/StoreRepo.scala | 3 + .../sales/store/StoreRepoImpl.scala | 35 +++ .../sales/store/StoreRepoMock.scala | 19 ++ .../adventureworks/sales/store/StoreRow.scala | 26 +++ .../production/product/RepoTest.scala | 45 ++++ .../department/DepartmentRepo.scala | 3 + .../department/DepartmentRepoImpl.scala | 15 ++ .../department/DepartmentRepoMock.scala | 9 + .../employee/EmployeeRepo.scala | 3 + .../employee/EmployeeRepoImpl.scala | 26 +++ .../employee/EmployeeRepoMock.scala | 9 + .../EmployeedepartmenthistoryRepo.scala | 3 + .../EmployeedepartmenthistoryRepoImpl.scala | 14 ++ .../EmployeedepartmenthistoryRepoMock.scala | 9 + .../EmployeepayhistoryRepo.scala | 3 + .../EmployeepayhistoryRepoImpl.scala | 15 ++ .../EmployeepayhistoryRepoMock.scala | 9 + .../jobcandidate/JobcandidateRepo.scala | 3 + .../jobcandidate/JobcandidateRepoImpl.scala | 15 ++ .../jobcandidate/JobcandidateRepoMock.scala | 9 + .../humanresources/shift/ShiftRepo.scala | 3 + .../humanresources/shift/ShiftRepoImpl.scala | 16 ++ .../humanresources/shift/ShiftRepoMock.scala | 9 + .../information_schema/CardinalNumber.scala | 38 +++ .../information_schema/CharacterData.scala | 38 +++ .../information_schema/SqlIdentifier.scala | 38 +++ .../information_schema/TimeStamp.scala | 39 ++++ .../information_schema/YesOrNo.scala | 38 +++ .../person/address/AddressRepo.scala | 3 + .../person/address/AddressRepoImpl.scala | 20 ++ .../person/address/AddressRepoMock.scala | 9 + .../person/addresstype/AddresstypeRepo.scala | 3 + .../addresstype/AddresstypeRepoImpl.scala | 15 ++ .../addresstype/AddresstypeRepoMock.scala | 9 + .../businessentity/BusinessentityRepo.scala | 3 + .../BusinessentityRepoImpl.scala | 14 ++ .../BusinessentityRepoMock.scala | 9 + .../BusinessentityaddressRepo.scala | 3 + .../BusinessentityaddressRepoImpl.scala | 14 ++ .../BusinessentityaddressRepoMock.scala | 9 + .../BusinessentitycontactRepo.scala | 3 + .../BusinessentitycontactRepoImpl.scala | 14 ++ .../BusinessentitycontactRepoMock.scala | 9 + .../person/contacttype/ContacttypeRepo.scala | 3 + .../contacttype/ContacttypeRepoImpl.scala | 14 ++ .../contacttype/ContacttypeRepoMock.scala | 9 + .../countryregion/CountryregionRepo.scala | 3 + .../countryregion/CountryregionRepoImpl.scala | 14 ++ .../countryregion/CountryregionRepoMock.scala | 9 + .../emailaddress/EmailaddressRepo.scala | 3 + .../emailaddress/EmailaddressRepoImpl.scala | 15 ++ .../emailaddress/EmailaddressRepoMock.scala | 9 + .../person/password/PasswordRepo.scala | 3 + .../person/password/PasswordRepoImpl.scala | 16 ++ .../person/password/PasswordRepoMock.scala | 9 + .../person/person/PersonRepo.scala | 3 + .../person/person/PersonRepoImpl.scala | 24 ++ .../person/person/PersonRepoMock.scala | 9 + .../person/personphone/PersonphoneRepo.scala | 3 + .../personphone/PersonphoneRepoImpl.scala | 13 ++ .../personphone/PersonphoneRepoMock.scala | 9 + .../phonenumbertype/PhonenumbertypeRepo.scala | 3 + .../PhonenumbertypeRepoImpl.scala | 14 ++ .../PhonenumbertypeRepoMock.scala | 9 + .../stateprovince/StateprovinceRepo.scala | 3 + .../stateprovince/StateprovinceRepoImpl.scala | 19 ++ .../stateprovince/StateprovinceRepoMock.scala | 9 + .../billofmaterials/BillofmaterialsRepo.scala | 3 + .../BillofmaterialsRepoImpl.scala | 20 
++ .../BillofmaterialsRepoMock.scala | 9 + .../production/culture/CultureRepo.scala | 3 + .../production/culture/CultureRepoImpl.scala | 14 ++ .../production/culture/CultureRepoMock.scala | 9 + .../production/document/DocumentRepo.scala | 3 + .../document/DocumentRepoImpl.scala | 24 ++ .../document/DocumentRepoMock.scala | 9 + .../illustration/IllustrationRepo.scala | 3 + .../illustration/IllustrationRepoImpl.scala | 14 ++ .../illustration/IllustrationRepoMock.scala | 9 + .../production/location/LocationRepo.scala | 3 + .../location/LocationRepoImpl.scala | 16 ++ .../location/LocationRepoMock.scala | 9 + .../production/product/ProductRepo.scala | 3 + .../production/product/ProductRepoImpl.scala | 36 +++ .../production/product/ProductRepoMock.scala | 9 + .../productcategory/ProductcategoryRepo.scala | 3 + .../ProductcategoryRepoImpl.scala | 15 ++ .../ProductcategoryRepoMock.scala | 9 + .../ProductcosthistoryRepo.scala | 3 + .../ProductcosthistoryRepoImpl.scala | 15 ++ .../ProductcosthistoryRepoMock.scala | 9 + .../ProductdescriptionRepo.scala | 3 + .../ProductdescriptionRepoImpl.scala | 15 ++ .../ProductdescriptionRepoMock.scala | 9 + .../productdocument/ProductdocumentRepo.scala | 3 + .../ProductdocumentRepoImpl.scala | 13 ++ .../ProductdocumentRepoMock.scala | 9 + .../ProductinventoryRepo.scala | 3 + .../ProductinventoryRepoImpl.scala | 17 ++ .../ProductinventoryRepoMock.scala | 9 + .../ProductlistpricehistoryRepo.scala | 3 + .../ProductlistpricehistoryRepoImpl.scala | 15 ++ .../ProductlistpricehistoryRepoMock.scala | 9 + .../productmodel/ProductmodelRepo.scala | 3 + .../productmodel/ProductmodelRepoImpl.scala | 17 ++ .../productmodel/ProductmodelRepoMock.scala | 9 + .../ProductmodelillustrationRepo.scala | 3 + .../ProductmodelillustrationRepoImpl.scala | 13 ++ .../ProductmodelillustrationRepoMock.scala | 9 + ...ctmodelproductdescriptioncultureRepo.scala | 3 + ...delproductdescriptioncultureRepoImpl.scala | 13 ++ ...delproductdescriptioncultureRepoMock.scala | 9 + .../productphoto/ProductphotoRepo.scala | 3 + .../productphoto/ProductphotoRepoImpl.scala | 17 ++ .../productphoto/ProductphotoRepoMock.scala | 9 + .../ProductproductphotoRepo.scala | 3 + .../ProductproductphotoRepoImpl.scala | 14 ++ .../ProductproductphotoRepoMock.scala | 9 + .../productreview/ProductreviewRepo.scala | 3 + .../productreview/ProductreviewRepoImpl.scala | 19 ++ .../productreview/ProductreviewRepoMock.scala | 9 + .../ProductsubcategoryRepo.scala | 3 + .../ProductsubcategoryRepoImpl.scala | 16 ++ .../ProductsubcategoryRepoMock.scala | 9 + .../scrapreason/ScrapreasonRepo.scala | 3 + .../scrapreason/ScrapreasonRepoImpl.scala | 14 ++ .../scrapreason/ScrapreasonRepoMock.scala | 9 + .../TransactionhistoryRepo.scala | 3 + .../TransactionhistoryRepoImpl.scala | 20 ++ .../TransactionhistoryRepoMock.scala | 9 + .../TransactionhistoryarchiveRepo.scala | 3 + .../TransactionhistoryarchiveRepoImpl.scala | 20 ++ .../TransactionhistoryarchiveRepoMock.scala | 9 + .../unitmeasure/UnitmeasureRepo.scala | 3 + .../unitmeasure/UnitmeasureRepoImpl.scala | 14 ++ .../unitmeasure/UnitmeasureRepoMock.scala | 9 + .../production/workorder/WorkorderRepo.scala | 3 + .../workorder/WorkorderRepoImpl.scala | 20 ++ .../workorder/WorkorderRepoMock.scala | 9 + .../WorkorderroutingRepo.scala | 3 + .../WorkorderroutingRepoImpl.scala | 21 ++ .../WorkorderroutingRepoMock.scala | 9 + .../public/flaff/FlaffRepo.scala | 3 + .../public/flaff/FlaffRepoImpl.scala | 13 ++ .../public/flaff/FlaffRepoMock.scala | 9 + .../identity_test/IdentityTestRepo.scala | 3 + 
.../identity_test/IdentityTestRepoImpl.scala | 14 ++ .../identity_test/IdentityTestRepoMock.scala | 9 + .../public/users/UsersRepo.scala | 3 + .../public/users/UsersRepoImpl.scala | 18 ++ .../public/users/UsersRepoMock.scala | 9 + .../productvendor/ProductvendorRepo.scala | 3 + .../productvendor/ProductvendorRepoImpl.scala | 21 ++ .../productvendor/ProductvendorRepoMock.scala | 9 + .../PurchaseorderheaderRepo.scala | 3 + .../PurchaseorderheaderRepoImpl.scala | 23 ++ .../PurchaseorderheaderRepoMock.scala | 9 + .../shipmethod/ShipmethodRepo.scala | 3 + .../shipmethod/ShipmethodRepoImpl.scala | 17 ++ .../shipmethod/ShipmethodRepoMock.scala | 9 + .../purchasing/vendor/VendorRepo.scala | 3 + .../purchasing/vendor/VendorRepoImpl.scala | 19 ++ .../purchasing/vendor/VendorRepoMock.scala | 9 + .../CountryregioncurrencyRepo.scala | 3 + .../CountryregioncurrencyRepoImpl.scala | 13 ++ .../CountryregioncurrencyRepoMock.scala | 9 + .../sales/creditcard/CreditcardRepo.scala | 3 + .../sales/creditcard/CreditcardRepoImpl.scala | 17 ++ .../sales/creditcard/CreditcardRepoMock.scala | 9 + .../sales/currency/CurrencyRepo.scala | 3 + .../sales/currency/CurrencyRepoImpl.scala | 14 ++ .../sales/currency/CurrencyRepoMock.scala | 9 + .../sales/currencyrate/CurrencyrateRepo.scala | 3 + .../currencyrate/CurrencyrateRepoImpl.scala | 18 ++ .../currencyrate/CurrencyrateRepoMock.scala | 9 + .../sales/customer/CustomerRepo.scala | 3 + .../sales/customer/CustomerRepoImpl.scala | 17 ++ .../sales/customer/CustomerRepoMock.scala | 9 + .../PersoncreditcardRepo.scala | 3 + .../PersoncreditcardRepoImpl.scala | 13 ++ .../PersoncreditcardRepoMock.scala | 9 + .../SalesorderdetailRepo.scala | 3 + .../SalesorderdetailRepoImpl.scala | 20 ++ .../SalesorderdetailRepoMock.scala | 9 + .../SalesorderheaderRepo.scala | 3 + .../SalesorderheaderRepoImpl.scala | 36 +++ .../SalesorderheaderRepoMock.scala | 9 + .../SalesorderheadersalesreasonRepo.scala | 3 + .../SalesorderheadersalesreasonRepoImpl.scala | 13 ++ .../SalesorderheadersalesreasonRepoMock.scala | 9 + .../sales/salesperson/SalespersonRepo.scala | 3 + .../salesperson/SalespersonRepoImpl.scala | 20 ++ .../salesperson/SalespersonRepoMock.scala | 9 + .../SalespersonquotahistoryRepo.scala | 3 + .../SalespersonquotahistoryRepoImpl.scala | 15 ++ .../SalespersonquotahistoryRepoMock.scala | 9 + .../sales/salesreason/SalesreasonRepo.scala | 3 + .../salesreason/SalesreasonRepoImpl.scala | 15 ++ .../salesreason/SalesreasonRepoMock.scala | 9 + .../sales/salestaxrate/SalestaxrateRepo.scala | 3 + .../salestaxrate/SalestaxrateRepoImpl.scala | 18 ++ .../salestaxrate/SalestaxrateRepoMock.scala | 9 + .../salesterritory/SalesterritoryRepo.scala | 3 + .../SalesterritoryRepoImpl.scala | 21 ++ .../SalesterritoryRepoMock.scala | 9 + .../SalesterritoryhistoryRepo.scala | 3 + .../SalesterritoryhistoryRepoImpl.scala | 15 ++ .../SalesterritoryhistoryRepoMock.scala | 9 + .../ShoppingcartitemRepo.scala | 3 + .../ShoppingcartitemRepoImpl.scala | 17 ++ .../ShoppingcartitemRepoMock.scala | 9 + .../sales/specialoffer/SpecialofferRepo.scala | 3 + .../specialoffer/SpecialofferRepoImpl.scala | 22 ++ .../specialoffer/SpecialofferRepoMock.scala | 9 + .../SpecialofferproductRepo.scala | 3 + .../SpecialofferproductRepoImpl.scala | 14 ++ .../SpecialofferproductRepoMock.scala | 9 + .../sales/store/StoreRepo.scala | 3 + .../sales/store/StoreRepoImpl.scala | 17 ++ .../sales/store/StoreRepoMock.scala | 9 + .../production/product/RepoTest.scala | 33 +++ typo/src/scala/typo/TypesScala.scala | 1 + 
.../scala/typo/internal/ComputedTable.scala | 6 + typo/src/scala/typo/internal/RepoMethod.scala | 16 ++ .../scala/typo/internal/codegen/DbLib.scala | 4 +- .../typo/internal/codegen/DbLibAnorm.scala | 170 +++++++++++--- .../typo/internal/codegen/DbLibDoobie.scala | 194 +++++++++++++--- .../typo/internal/codegen/DbLibZioJdbc.scala | 125 ++++++---- .../typo/internal/codegen/FilesRelation.scala | 27 ++- typo/src/scala/typo/sc.scala | 1 + 814 files changed, 15574 insertions(+), 128 deletions(-) create mode 100644 .bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala create mode 100644 .bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala create mode 100644 typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala create mode 100644 typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala create mode 100644 typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala create mode 100644 typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala create mode 100644 typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala create mode 100644 typo-tester-anorm/generated-and-checked-in/anorm/adventureworks/ExecuteReturningSyntax.scala create mode 100644 typo-tester-anorm/src/scala/adventureworks/production/product/RepoTest.scala create mode 100644 typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala create mode 100644 typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala create mode 100644 typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala create mode 100644 typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala create mode 100644 typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala create mode 100644 typo-tester-doobie/src/scala/adventureworks/production/product/RepoTest.scala create mode 100644 typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala create mode 100644 typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala create mode 100644 typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala create mode 100644 typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala create mode 100644 typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala create mode 100644 typo-tester-zio-jdbc/src/scala/adventureworks/production/product/RepoTest.scala diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala new file mode 100644 index 0000000000..6fe6f5460c --- /dev/null +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala @@ -0,0 +1,29 @@ +/** + * File automatically generated by `typo` for its own test suite. 
+ * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN + */ +package anorm +package testdb +package hardcoded + +import java.sql.Connection +import resource.managed + +object ExecuteReturningSyntax { + /* add executeReturning to anorm. it needs to be inside the package, because everything is hidden */ + implicit class Ops(batchSql: BatchSql) { + def executeReturning[T](parser: ResultSetParser[T])(implicit c: Connection): T = + managed(batchSql.getFilledStatement(c, getGeneratedKeys = true))(using StatementResource, statementClassTag).acquireAndGet { ps => + ps.executeBatch() + Sql + .asTry( + parser, + managed(ps.getGeneratedKeys)(using ResultSetResource, resultSetClassTag), + onFirstRow = false, + ColumnAliaser.empty + ) + .get + } + } +} diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 5e479597f4..87d9280d49 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -29,4 +29,7 @@ trait PersonRepo { def update(row: PersonRow)(implicit c: Connection): Boolean def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 41b54c8cd1..9adcf1c63a 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package compositepk package person +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import testdb.hardcoded.customtypes.Defaulted import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -148,4 +150,40 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("one", ParameterValue(row.one, null, ToStatement.longToStatement)), + NamedParameter("two", ParameterValue(row.two, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into compositepk.person("one", "two", "name") + values ({one}::int8, {two}, {name}) + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + returning "one", "two", "name" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like compositepk.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("one", "two", "name") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 2c1451f0fa..12d1979bee 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -100,4 +100,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 0f0a9a4075..7fba23c571 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -29,4 +29,7 @@ trait FootballClubRepo { def update(row: FootballClubRow)(implicit c: Connection): Boolean def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: FootballClubRow)(implicit c: Connection): FootballClubRow + def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 8d8836da5b..116c3e0400 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package football_club +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -16,6 +17,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -130,4 +132,39 @@ class FootballClubRepoImpl extends FootballClubRepo { .executeInsert(FootballClubRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] = { + def toNamedParameter(row: FootballClubRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, FootballClubId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.football_club("id", "name") + values ({id}::int8, {name}) + on conflict ("id") + do update set + "name" = EXCLUDED."name" + returning "id", "name" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(FootballClubRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".execute(): @nowarn + streamingInsert(s"""copy football_club_TEMP("id", "name") from stdin""", batchSize, unsaved)(FootballClubRow.text, c): @nowarn + SQL"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index e0fe8f1bd0..37d177d060 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -97,4 +97,17 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index cf8ea88c39..0afd9d8936 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -27,4 +27,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId])(implicit c: Connection): Map[MaritalStatusId, MaritalStatusRow] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow)(implicit c: Connection): MaritalStatusRow + def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index 9bec11ce6b..256f6c94c7 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package marital_status +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -15,6 +16,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -96,11 +98,42 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${ParameterValue(unsaved.id, null, MaritalStatusId.toStatement)}::int8 ) on conflict ("id") - do update set - + do nothing returning "id" """ .executeInsert(MaritalStatusRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] = { + def toNamedParameter(row: MaritalStatusRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, MaritalStatusId.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.marital_status("id") + values ({id}::int8) + on conflict ("id") + do nothing + returning "id" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(MaritalStatusRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".execute(): @nowarn + streamingInsert(s"""copy marital_status_TEMP("id") from stdin""", batchSize, unsaved)(MaritalStatusRow.text, c): @nowarn + SQL"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index 9b4c2bb8dc..cf8a6121d2 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -72,4 +72,17 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index d3c6b5bd62..b8c43ba307 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -32,4 +32,7 @@ trait PersonRepo { def update(row: PersonRow)(implicit c: Connection): Boolean def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index 0afad0da33..ecb767f39a 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package person +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import testdb.hardcoded.customtypes.Defaulted import testdb.hardcoded.myschema.football_club.FootballClubId import testdb.hardcoded.myschema.marital_status.MaritalStatusId @@ -231,4 +233,69 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, PersonId.toStatement)), + NamedParameter("favourite_football_club_id", ParameterValue(row.favouriteFootballClubId, null, FootballClubId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)), + NamedParameter("nick_name", ParameterValue(row.nickName, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("blog_url", ParameterValue(row.blogUrl, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("email", ParameterValue(row.email, null, ToStatement.stringToStatement)), + NamedParameter("phone", ParameterValue(row.phone, null, ToStatement.stringToStatement)), + NamedParameter("likes_pizza", ParameterValue(row.likesPizza, null, ToStatement.booleanToStatement)), + NamedParameter("marital_status_id", ParameterValue(row.maritalStatusId, null, MaritalStatusId.toStatement)), + NamedParameter("work_email", ParameterValue(row.workEmail, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("sector", ParameterValue(row.sector, null, Sector.toStatement)), + NamedParameter("favorite_number", ParameterValue(row.favoriteNumber, null, Number.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + values ({id}::int8, {favourite_football_club_id}, {name}, {nick_name}, {blog_url}, {email}, {phone}, {likes_pizza}, {marital_status_id}, {work_email}, {sector}::myschema.sector, {favorite_number}::myschema.number) + on conflict ("id") + do update set + "favourite_football_club_id" = 
EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like myschema.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index f74fcf88eb..bb9994c80d 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -129,4 +129,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala new file mode 100644 index 0000000000..6fe6f5460c --- /dev/null +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/anorm/testdb/hardcoded/ExecuteReturningSyntax.scala @@ -0,0 +1,29 @@ +/** + * File automatically generated by `typo` for its own test suite. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN + */ +package anorm +package testdb +package hardcoded + +import java.sql.Connection +import resource.managed + +object ExecuteReturningSyntax { + /* add executeReturning to anorm. it needs to be inside the package, because everything is hidden */ + implicit class Ops(batchSql: BatchSql) { + def executeReturning[T](parser: ResultSetParser[T])(implicit c: Connection): T = + managed(batchSql.getFilledStatement(c, getGeneratedKeys = true))(using StatementResource, statementClassTag).acquireAndGet { ps => + ps.executeBatch() + Sql + .asTry( + parser, + managed(ps.getGeneratedKeys)(using ResultSetResource, resultSetClassTag), + onFirstRow = false, + ColumnAliaser.empty + ) + .get + } + } +} diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 5e479597f4..87d9280d49 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -29,4 +29,7 @@ trait PersonRepo { def update(row: PersonRow)(implicit c: Connection): Boolean def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 41b54c8cd1..9adcf1c63a 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package compositepk package person +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import testdb.hardcoded.customtypes.Defaulted import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -148,4 +150,40 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("one", ParameterValue(row.one, null, ToStatement.longToStatement)), + NamedParameter("two", ParameterValue(row.two, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into compositepk.person("one", "two", "name") + values ({one}::int8, {two}, {name}) + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + returning "one", "two", "name" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like compositepk.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("one", "two", "name") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 2c1451f0fa..12d1979bee 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -100,4 +100,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 0f0a9a4075..7fba23c571 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -29,4 +29,7 @@ trait FootballClubRepo { def update(row: FootballClubRow)(implicit c: Connection): Boolean def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: FootballClubRow)(implicit c: Connection): FootballClubRow + def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 8d8836da5b..116c3e0400 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package football_club +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -16,6 +17,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -130,4 +132,39 @@ class FootballClubRepoImpl extends FootballClubRepo { .executeInsert(FootballClubRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] = { + def toNamedParameter(row: FootballClubRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, FootballClubId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.football_club("id", "name") + values ({id}::int8, {name}) + on conflict ("id") + do update set + "name" = EXCLUDED."name" + returning "id", "name" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(FootballClubRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".execute(): @nowarn + streamingInsert(s"""copy football_club_TEMP("id", "name") from stdin""", batchSize, unsaved)(FootballClubRow.text, c): @nowarn + SQL"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index e0fe8f1bd0..37d177d060 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -97,4 +97,17 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[FootballClubRow])(implicit c: Connection): List[FootballClubRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FootballClubRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index cf8ea88c39..0afd9d8936 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -27,4 +27,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId])(implicit c: Connection): Map[MaritalStatusId, MaritalStatusRow] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow)(implicit c: Connection): MaritalStatusRow + def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index 9bec11ce6b..256f6c94c7 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package marital_status +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -15,6 +16,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -96,11 +98,42 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${ParameterValue(unsaved.id, null, MaritalStatusId.toStatement)}::int8 ) on conflict ("id") - do update set - + do nothing returning "id" """ .executeInsert(MaritalStatusRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] = { + def toNamedParameter(row: MaritalStatusRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, MaritalStatusId.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.marital_status("id") + values ({id}::int8) + on conflict ("id") + do nothing + returning "id" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(MaritalStatusRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".execute(): @nowarn + streamingInsert(s"""copy marital_status_TEMP("id") from stdin""", batchSize, unsaved)(MaritalStatusRow.text, c): @nowarn + SQL"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index 9b4c2bb8dc..cf8a6121d2 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -72,4 +72,17 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[MaritalStatusRow])(implicit c: Connection): List[MaritalStatusRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[MaritalStatusRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index d3c6b5bd62..b8c43ba307 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -32,4 +32,7 @@ trait PersonRepo { def update(row: PersonRow)(implicit c: Connection): Boolean def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]])(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index 0afad0da33..ecb767f39a 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -8,6 +8,7 @@ package hardcoded package myschema package person +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import testdb.hardcoded.customtypes.Defaulted import testdb.hardcoded.myschema.football_club.FootballClubId import testdb.hardcoded.myschema.marital_status.MaritalStatusId @@ -231,4 +233,69 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("id", ParameterValue(row.id, null, PersonId.toStatement)), + NamedParameter("favourite_football_club_id", ParameterValue(row.favouriteFootballClubId, null, FootballClubId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)), + NamedParameter("nick_name", ParameterValue(row.nickName, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("blog_url", ParameterValue(row.blogUrl, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("email", ParameterValue(row.email, null, ToStatement.stringToStatement)), + NamedParameter("phone", ParameterValue(row.phone, null, ToStatement.stringToStatement)), + NamedParameter("likes_pizza", ParameterValue(row.likesPizza, null, ToStatement.booleanToStatement)), + NamedParameter("marital_status_id", ParameterValue(row.maritalStatusId, null, MaritalStatusId.toStatement)), + NamedParameter("work_email", ParameterValue(row.workEmail, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("sector", ParameterValue(row.sector, null, Sector.toStatement)), + NamedParameter("favorite_number", ParameterValue(row.favoriteNumber, null, Number.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.testdb.hardcoded.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + values ({id}::int8, {favourite_football_club_id}, {name}, {nick_name}, {blog_url}, {email}, {phone}, {likes_pizza}, {marital_status_id}, {work_email}, {sector}::myschema.sector, {favorite_number}::myschema.number) + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + 
"name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like myschema.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index f74fcf88eb..bb9994c80d 100644 --- a/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-anorm@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -129,4 +129,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.id, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.id -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.id -> row) + } + unsaved.size + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 300cd52c67..66c742ff96 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -30,4 +30,7 @@ trait PersonRepo { def update(row: PersonRow): ConnectionIO[Boolean] def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 742b01d597..4ecacd8568 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -9,6 +9,7 @@ package compositepk package person import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -18,6 +19,7 @@ import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import testdb.hardcoded.customtypes.Defaulted import typo.dsl.DeleteBuilder @@ -130,4 +132,29 @@ class PersonRepoImpl extends PersonRepo { returning "one", "two", "name" """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into compositepk.person("one", "two", "name") + values (?::int8,?,?) + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + returning "one", "two", "name"""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("one", "two", "name")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like compositepk.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("one", "two", "name") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 0c56cce4d4..a9d2f1be4a 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -120,4 +120,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala index cdfb0c8c22..47c9679317 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala @@ -11,6 +11,7 @@ package person import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -56,4 +57,20 @@ object PersonRow { sb.append(Text.DELIMETER) Text.option(Text.stringInstance).unsafeEncode(row.name, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((Meta.LongMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable)), + toList = x => List(x.one, x.two, x.name), + unsafeSet = (rs, i, a) => { + Meta.LongMeta.put.unsafeSetNonNullable(rs, i + 0, a.one) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 1, a.two) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.name) + }, + unsafeUpdate = (ps, i, a) => { + Meta.LongMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.one) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 1, a.two) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.name) + } + ) } diff --git 
a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 34e5502bbd..48efb3aea9 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -30,4 +30,7 @@ trait FootballClubRepo { def update(row: FootballClubRow): ConnectionIO[Boolean] def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: FootballClubRow): ConnectionIO[FootballClubRow] + def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 0c0b7adfc4..c435bdc808 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -9,6 +9,7 @@ package myschema package football_club import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -107,4 +109,29 @@ class FootballClubRepoImpl extends FootballClubRepo { returning "id", "name" """.query(using FootballClubRow.read).unique } + override def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] = { + Update[FootballClubRow]( + s"""insert into myschema.football_club("id", "name") + values (?::int8,?) + on conflict ("id") + do update set + "name" = EXCLUDED."name" + returning "id", "name"""" + )(using FootballClubRow.write) + .updateManyWithGeneratedKeys[FootballClubRow]("id", "name")(unsaved)(using catsStdInstancesForList, FootballClubRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".update.run + _ <- new FragmentOps(sql"""copy football_club_TEMP("id", "name") from stdin""").copyIn(unsaved, batchSize)(using FootballClubRow.text) + res <- sql"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index 40df5f1815..de93115fd1 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -115,4 +115,23 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo unsaved } } + override def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala index 3b8985a4f8..eaaac16fb0 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala @@ -11,6 +11,7 @@ package football_club import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -42,4 +43,17 @@ object FootballClubRow { sb.append(Text.DELIMETER) Text.stringInstance.unsafeEncode(row.name, sb) } + implicit lazy val write: Write[FootballClubRow] = new Write[FootballClubRow]( + puts = List((FootballClubId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls)), + toList = x => List(x.id, x.name), + unsafeSet = (rs, i, a) => { + FootballClubId.put.unsafeSetNonNullable(rs, i + 0, a.id) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.name) + }, + unsafeUpdate = (ps, i, a) => { + FootballClubId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + } + ) } diff --git 
a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index 29564dfea6..4307c05f12 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -28,4 +28,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId]): ConnectionIO[Map[MaritalStatusId, MaritalStatusRow]] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow): ConnectionIO[MaritalStatusRow] + def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index c2253807d2..bed0f0dc00 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -8,12 +8,14 @@ package hardcoded package myschema package marital_status +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragments +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -74,9 +76,31 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${fromWrite(unsaved.id)(Write.fromPut(MaritalStatusId.put))}::int8 ) on conflict ("id") - do update set - + do nothing returning "id" """.query(using MaritalStatusRow.read).unique } + override def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] = { + Update[MaritalStatusRow]( + s"""insert into myschema.marital_status("id") + values (?::int8) + on conflict ("id") + do nothing + returning "id"""" + )(using MaritalStatusRow.write) + .updateManyWithGeneratedKeys[MaritalStatusRow]("id")(unsaved)(using catsStdInstancesForList, MaritalStatusRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".update.run + _ <- new FragmentOps(sql"""copy marital_status_TEMP("id") from stdin""").copyIn(unsaved, batchSize)(using MaritalStatusRow.text) + res <- sql"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index 15191cb112..31d51191de 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -86,4 +86,23 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M unsaved } } + override def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala index da439766aa..d2a9981c82 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala @@ -11,6 +11,7 @@ package marital_status import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -35,4 +36,14 @@ object MaritalStatusRow { implicit lazy val text: Text[MaritalStatusRow] = Text.instance[MaritalStatusRow]{ (row, sb) => MaritalStatusId.text.unsafeEncode(row.id, sb) } + implicit lazy val write: Write[MaritalStatusRow] = new Write[MaritalStatusRow]( + puts = List((MaritalStatusId.put, Nullability.NoNulls)), + toList = x => List(x.id), + unsafeSet = (rs, i, a) => { + MaritalStatusId.put.unsafeSetNonNullable(rs, i + 0, a.id) + }, + unsafeUpdate = (ps, i, a) => { + MaritalStatusId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + } + ) } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala 
b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index 5e9fe04f97..9db299d5d0 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -33,4 +33,7 @@ trait PersonRepo { def update(row: PersonRow): ConnectionIO[Boolean] def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index cbcdf4cd8b..b499a6d13a 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -9,6 +9,7 @@ package myschema package person import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -18,6 +19,7 @@ import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import testdb.hardcoded.customtypes.Defaulted import testdb.hardcoded.myschema.football_club.FootballClubId @@ -207,4 +209,49 @@ class PersonRepoImpl extends PersonRepo { returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number" """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + values (?::int8,?,?,?,?,?,?,?,?,?,?::myschema.sector,?::myschema.number) + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number"""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("id", "favourite_football_club_id", "name", "nick_name", 
"blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like myschema.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index 3bcd444f70..75ec1f0511 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -151,4 +151,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala index b8664a1ca1..ffbdd51d9a 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala @@ -11,6 +11,7 @@ package person import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -104,4 +105,47 @@ object PersonRow { sb.append(Text.DELIMETER) Number.text.unsafeEncode(row.favoriteNumber, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((PersonId.put, Nullability.NoNulls), + (FootballClubId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.BooleanMeta.put, Nullability.NoNulls), + (MaritalStatusId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Sector.put, Nullability.NoNulls), + (Number.put, Nullability.NoNulls)), + toList = x => List(x.id, x.favouriteFootballClubId, x.name, x.nickName, x.blogUrl, x.email, x.phone, x.likesPizza, x.maritalStatusId, x.workEmail, x.sector, x.favoriteNumber), + unsafeSet = (rs, i, a) => { + PersonId.put.unsafeSetNonNullable(rs, i + 0, a.id) + FootballClubId.put.unsafeSetNonNullable(rs, i + 1, a.favouriteFootballClubId) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.name) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 3, a.nickName) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.blogUrl) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.email) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 6, a.phone) + Meta.BooleanMeta.put.unsafeSetNonNullable(rs, i + 7, a.likesPizza) + MaritalStatusId.put.unsafeSetNonNullable(rs, i + 8, a.maritalStatusId) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 9, a.workEmail) + Sector.put.unsafeSetNonNullable(rs, i + 10, a.sector) + Number.put.unsafeSetNonNullable(rs, i + 11, a.favoriteNumber) + }, + unsafeUpdate = (ps, i, a) => { + PersonId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + FootballClubId.put.unsafeUpdateNonNullable(ps, i + 1, a.favouriteFootballClubId) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.name) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 3, a.nickName) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.blogUrl) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.email) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.phone) + Meta.BooleanMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.likesPizza) + MaritalStatusId.put.unsafeUpdateNonNullable(ps, i + 8, a.maritalStatusId) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 9, a.workEmail) + Sector.put.unsafeUpdateNonNullable(ps, i + 10, 
a.sector) + Number.put.unsafeUpdateNonNullable(ps, i + 11, a.favoriteNumber) + } + ) } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 300cd52c67..66c742ff96 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -30,4 +30,7 @@ trait PersonRepo { def update(row: PersonRow): ConnectionIO[Boolean] def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 742b01d597..4ecacd8568 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -9,6 +9,7 @@ package compositepk package person import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -18,6 +19,7 @@ import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import testdb.hardcoded.customtypes.Defaulted import typo.dsl.DeleteBuilder @@ -130,4 +132,29 @@ class PersonRepoImpl extends PersonRepo { returning "one", "two", "name" """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into compositepk.person("one", "two", "name") + values (?::int8,?,?) + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + returning "one", "two", "name"""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("one", "two", "name")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like compositepk.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("one", "two", "name") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 0c56cce4d4..a9d2f1be4a 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -120,4 +120,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala index cdfb0c8c22..47c9679317 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRow.scala @@ -11,6 +11,7 @@ package person import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -56,4 +57,20 @@ object PersonRow { sb.append(Text.DELIMETER) Text.option(Text.stringInstance).unsafeEncode(row.name, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((Meta.LongMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable)), + toList = x => List(x.one, x.two, x.name), + unsafeSet = (rs, i, a) => { + Meta.LongMeta.put.unsafeSetNonNullable(rs, i + 0, a.one) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 1, a.two) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.name) + }, + unsafeUpdate = (ps, i, a) => { + Meta.LongMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.one) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 1, a.two) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.name) + } + ) } diff --git 
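For the composite-key table above, upsertBatch sends the rows as one JDBC batch over a single prepared statement (one parameter set per row, supplied by the Write instance) and streams the upserted rows back through the returning clause. A small usage sketch, assuming a doobie Transactor is available; only the generated types are real:

    import cats.effect.IO
    import doobie.Transactor
    import doobie.implicits._
    import testdb.hardcoded.compositepk.person.{PersonRepo, PersonRow}

    // Collect the rows produced by `returning "one", "two", "name"` and commit once.
    def upsertAll(xa: Transactor[IO], repo: PersonRepo, rows: List[PersonRow]): IO[List[PersonRow]] =
      repo.upsertBatch(rows).compile.toList.transact(xa)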
a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 34e5502bbd..48efb3aea9 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -30,4 +30,7 @@ trait FootballClubRepo { def update(row: FootballClubRow): ConnectionIO[Boolean] def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: FootballClubRow): ConnectionIO[FootballClubRow] + def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 0c0b7adfc4..c435bdc808 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -9,6 +9,7 @@ package myschema package football_club import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -107,4 +109,29 @@ class FootballClubRepoImpl extends FootballClubRepo { returning "id", "name" """.query(using FootballClubRow.read).unique } + override def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] = { + Update[FootballClubRow]( + s"""insert into myschema.football_club("id", "name") + values (?::int8,?) + on conflict ("id") + do update set + "name" = EXCLUDED."name" + returning "id", "name"""" + )(using FootballClubRow.write) + .updateManyWithGeneratedKeys[FootballClubRow]("id", "name")(unsaved)(using catsStdInstancesForList, FootballClubRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".update.run + _ <- new FragmentOps(sql"""copy football_club_TEMP("id", "name") from stdin""").copyIn(unsaved, batchSize)(using FootballClubRow.text) + res <- sql"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index 40df5f1815..de93115fd1 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -115,4 +115,23 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo unsaved } } + override def upsertBatch(unsaved: List[FootballClubRow]): Stream[ConnectionIO, FootballClubRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FootballClubRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala index 3b8985a4f8..eaaac16fb0 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRow.scala @@ -11,6 +11,7 @@ package football_club import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -42,4 +43,17 @@ object FootballClubRow { sb.append(Text.DELIMETER) Text.stringInstance.unsafeEncode(row.name, sb) } + implicit lazy val write: Write[FootballClubRow] = new Write[FootballClubRow]( + puts = List((FootballClubId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls)), + toList = x => List(x.id, x.name), + unsafeSet = (rs, i, a) => { + FootballClubId.put.unsafeSetNonNullable(rs, i + 0, a.id) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.name) + }, + unsafeUpdate = (ps, i, a) => { + FootballClubId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + } + ) } diff --git 
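The Write[FootballClubRow] instance added above is what feeds the `?` placeholders in the generated upsertBatch SQL: the entries in puts, and the values produced by toList, are applied in order, so (?::int8, ?) lines up with ("id", "name"). As a sketch of the same mechanism outside the upsert (the value name is illustrative), the instance can drive any doobie Update over this row type; calling updateMany on it would execute one plain JDBC batch without the conflict/returning parts:

    import doobie.util.update.Update
    import testdb.hardcoded.myschema.football_club.FootballClubRow

    // One parameter per entry in FootballClubRow.write, in declaration order.
    val plainBatchInsert: Update[FootballClubRow] =
      Update[FootballClubRow](
        """insert into myschema.football_club("id", "name") values (?::int8, ?)"""
      )(using FootballClubRow.write)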
a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index 29564dfea6..4307c05f12 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -28,4 +28,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId]): ConnectionIO[Map[MaritalStatusId, MaritalStatusRow]] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow): ConnectionIO[MaritalStatusRow] + def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index c2253807d2..bed0f0dc00 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -8,12 +8,14 @@ package hardcoded package myschema package marital_status +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragments +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -74,9 +76,31 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${fromWrite(unsaved.id)(Write.fromPut(MaritalStatusId.put))}::int8 ) on conflict ("id") - do update set - + do nothing returning "id" """.query(using MaritalStatusRow.read).unique } + override def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] = { + Update[MaritalStatusRow]( + s"""insert into myschema.marital_status("id") + values (?::int8) + on conflict ("id") + do nothing + returning "id"""" + )(using MaritalStatusRow.write) + .updateManyWithGeneratedKeys[MaritalStatusRow]("id")(unsaved)(using catsStdInstancesForList, MaritalStatusRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".update.run + _ <- new FragmentOps(sql"""copy marital_status_TEMP("id") from stdin""").copyIn(unsaved, batchSize)(using MaritalStatusRow.text) + res <- sql"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index 15191cb112..31d51191de 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -86,4 +86,23 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M unsaved } } + override def upsertBatch(unsaved: List[MaritalStatusRow]): Stream[ConnectionIO, MaritalStatusRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, MaritalStatusRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala index da439766aa..d2a9981c82 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRow.scala @@ -11,6 +11,7 @@ package marital_status import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -35,4 +36,14 @@ object MaritalStatusRow { implicit lazy val text: Text[MaritalStatusRow] = Text.instance[MaritalStatusRow]{ (row, sb) => MaritalStatusId.text.unsafeEncode(row.id, sb) } + implicit lazy val write: Write[MaritalStatusRow] = new Write[MaritalStatusRow]( + puts = List((MaritalStatusId.put, Nullability.NoNulls)), + toList = x => List(x.id), + unsafeSet = (rs, i, a) => { + MaritalStatusId.put.unsafeSetNonNullable(rs, i + 0, a.id) + }, + unsafeUpdate = (ps, i, a) => { + MaritalStatusId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + } + ) } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala 
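The marital_status hunks above also show the special case of a table whose only column is its primary key: with nothing left to assign, `do update set` would have an empty assignment list (which Postgres rejects), so the generated conflict action is `do nothing`, meaning rows whose id already exists are simply skipped. A standalone doobie sketch of that Postgres behaviour (not generated code; the update count reflects only rows actually inserted):

    import doobie.ConnectionIO
    import doobie.implicits._

    // Returns 1 if the id was inserted, 0 if it already existed (the conflict is skipped).
    def insertIfAbsent(id: Long): ConnectionIO[Int] =
      sql"""insert into myschema.marital_status("id")
            values ($id::int8)
            on conflict ("id") do nothing""".update.run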
b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index 5e9fe04f97..9db299d5d0 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -33,4 +33,7 @@ trait PersonRepo { def update(row: PersonRow): ConnectionIO[Boolean] def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]]): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index cbcdf4cd8b..b499a6d13a 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -9,6 +9,7 @@ package myschema package person import cats.data.NonEmptyList +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.free.connection.pure import doobie.postgres.syntax.FragmentOps @@ -18,6 +19,7 @@ import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.fragments import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import testdb.hardcoded.customtypes.Defaulted import testdb.hardcoded.myschema.football_club.FootballClubId @@ -207,4 +209,49 @@ class PersonRepoImpl extends PersonRepo { returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number" """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + values (?::int8,?,?,?,?,?,?,?,?,?,?::myschema.sector,?::myschema.number) + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number"""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("id", "favourite_football_club_id", "name", "nick_name", "blog_url", 
"email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like myschema.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index 3bcd444f70..75ec1f0511 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -151,4 +151,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.id -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.id -> row) + num += 1 + } + num + } + } } diff --git a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala index b8664a1ca1..ffbdd51d9a 100644 --- a/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala +++ b/.bleep/generated-sources/typo-tester-doobie@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRow.scala @@ -11,6 +11,7 @@ package person import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -104,4 +105,47 @@ object PersonRow { sb.append(Text.DELIMETER) Number.text.unsafeEncode(row.favoriteNumber, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((PersonId.put, Nullability.NoNulls), + (FootballClubId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.BooleanMeta.put, Nullability.NoNulls), + (MaritalStatusId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Sector.put, Nullability.NoNulls), + (Number.put, Nullability.NoNulls)), + toList = x => List(x.id, x.favouriteFootballClubId, x.name, x.nickName, x.blogUrl, x.email, x.phone, x.likesPizza, x.maritalStatusId, x.workEmail, x.sector, x.favoriteNumber), + unsafeSet = (rs, i, a) => { + PersonId.put.unsafeSetNonNullable(rs, i + 0, a.id) + FootballClubId.put.unsafeSetNonNullable(rs, i + 1, a.favouriteFootballClubId) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.name) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 3, a.nickName) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.blogUrl) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.email) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 6, a.phone) + Meta.BooleanMeta.put.unsafeSetNonNullable(rs, i + 7, a.likesPizza) + MaritalStatusId.put.unsafeSetNonNullable(rs, i + 8, a.maritalStatusId) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 9, a.workEmail) + Sector.put.unsafeSetNonNullable(rs, i + 10, a.sector) + Number.put.unsafeSetNonNullable(rs, i + 11, a.favoriteNumber) + }, + unsafeUpdate = (ps, i, a) => { + PersonId.put.unsafeUpdateNonNullable(ps, i + 0, a.id) + FootballClubId.put.unsafeUpdateNonNullable(ps, i + 1, a.favouriteFootballClubId) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.name) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 3, a.nickName) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.blogUrl) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.email) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.phone) + Meta.BooleanMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.likesPizza) + MaritalStatusId.put.unsafeUpdateNonNullable(ps, i + 8, a.maritalStatusId) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 9, a.workEmail) + Sector.put.unsafeUpdateNonNullable(ps, i + 10, a.sector) + 
Number.put.unsafeUpdateNonNullable(ps, i + 11, a.favoriteNumber) + } + ) } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 2a17943bfb..d47e1b1541 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -32,4 +32,7 @@ trait PersonRepo { def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 99e80552b5..d91f13f407 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -123,4 +123,17 @@ class PersonRepoImpl extends PersonRepo { "name" = EXCLUDED."name" returning "one", "two", "name"""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like compositepk.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("one", "two", "name") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 4f70aa2069..67b5eb0645 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -119,4 +119,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 652845f0c7..2f956fc30a 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -32,4 +32,7 @@ trait FootballClubRepo { def update(row: FootballClubRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: FootballClubRow): ZIO[ZConnection, Throwable, UpdateResult[FootballClubRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
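For the zio-jdbc flavour, only the streaming variant is generated; the trait carries the `// Not implementable for zio-jdbc: upsertBatch` marker instead of a method. A rough wiring sketch, assuming the generated zio-jdbc PersonRepo and PersonRow are on the classpath; how the ZConnection is provided, and how it is wrapped in a single transaction as the NOTE requires, depends on the application and is elided here:

    import zio._
    import zio.jdbc.ZConnection
    import zio.stream.ZStream
    import testdb.hardcoded.compositepk.person.{PersonRepo, PersonRow}

    // Feed an in-memory Chunk through the streaming upsert; transaction scoping is the caller's responsibility.
    def bulkUpsert(repo: PersonRepo, rows: Chunk[PersonRow]): ZIO[ZConnection, Throwable, Long] =
      repo.upsertStreaming(ZStream.fromChunk(rows), batchSize = 5000)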
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 3a7a40b828..eddf925ac2 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -103,4 +103,17 @@ class FootballClubRepoImpl extends FootballClubRepo { "name" = EXCLUDED."name" returning "id", "name"""".insertReturning(using FootballClubRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".execute + val copied = streamingInsert(s"""copy football_club_TEMP("id", "name") from stdin""", batchSize, unsaved)(FootballClubRow.text) + val merged = sql"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index 90089894b2..6e1ceff710 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -116,4 +116,13 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index a12b10174e..1d52df440e 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -30,4 +30,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId]): ZIO[ZConnection, Throwable, Map[MaritalStatusId, MaritalStatusRow]] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow): ZIO[ZConnection, Throwable, UpdateResult[MaritalStatusRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index effe6fc4f4..1801585d43 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -78,6 +78,19 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${Segment.paramSegment(unsaved.id)(MaritalStatusId.setter)}::int8 ) on conflict ("id") + do nothing returning "id"""".insertReturning(using MaritalStatusRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".execute + val copied = streamingInsert(s"""copy marital_status_TEMP("id") from stdin""", batchSize, unsaved)(MaritalStatusRow.text) + val merged = sql"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index c87c7a6b21..c473f43f07 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -87,4 +87,13 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index 7eb24cee51..db8b93dac1 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -35,4 +35,7 @@ trait PersonRepo { def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index 47ce473aa9..ede74147b9 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -200,4 +200,27 @@ class PersonRepoImpl extends PersonRepo { "favorite_number" = EXCLUDED."favorite_number" returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number"""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like myschema.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index 880b46fefa..cdd0e78296 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm213/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -150,4 +150,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala index 2a17943bfb..d47e1b1541 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepo.scala @@ -32,4 +32,7 @@ trait PersonRepo { def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(compositeId: PersonId, fieldValues: List[PersonFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala index 99e80552b5..d91f13f407 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoImpl.scala @@ -123,4 +123,17 @@ class PersonRepoImpl extends PersonRepo { "name" = EXCLUDED."name" returning "one", "two", "name"""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
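The zio-jdbc mocks above count rows with scanZIO followed by runLast while mutating the backing map; the same bookkeeping can also be read as a single fold over the stream. A generic sketch of that equivalence (helper name and shape are illustrative, not generated):

    import zio._
    import zio.stream.ZStream

    // Store each row in the map keyed by `key`, and return how many rows were seen.
    def storeAndCount[R, K, V](
        rows: ZStream[R, Throwable, V],
        key: V => K,
        map: scala.collection.mutable.Map[K, V]
    ): ZIO[R, Throwable, Long] =
      rows.runFoldZIO(0L) { (acc, row) =>
        ZIO.succeed { map += (key(row) -> row); acc + 1 }
      }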
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like compositepk.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("one", "two", "name") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into compositepk.person("one", "two", "name") + select * from person_TEMP + on conflict ("one", "two") + do update set + "name" = EXCLUDED."name" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala index 4f70aa2069..67b5eb0645 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/compositepk/person/PersonRepoMock.scala @@ -119,4 +119,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala index 652845f0c7..2f956fc30a 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepo.scala @@ -32,4 +32,7 @@ trait FootballClubRepo { def update(row: FootballClubRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(id: FootballClubId, fieldValues: List[FootballClubFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: FootballClubRow): ZIO[ZConnection, Throwable, UpdateResult[FootballClubRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala index 3a7a40b828..eddf925ac2 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoImpl.scala @@ -103,4 +103,17 @@ class FootballClubRepoImpl extends FootballClubRepo { "name" = EXCLUDED."name" returning "id", "name"""".insertReturning(using FootballClubRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table football_club_TEMP (like myschema.football_club) on commit drop".execute + val copied = streamingInsert(s"""copy football_club_TEMP("id", "name") from stdin""", batchSize, unsaved)(FootballClubRow.text) + val merged = sql"""insert into myschema.football_club("id", "name") + select * from football_club_TEMP + on conflict ("id") + do update set + "name" = EXCLUDED."name" + ; + drop table football_club_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala index 90089894b2..6e1ceff710 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/football_club/FootballClubRepoMock.scala @@ -116,4 +116,13 @@ class FootballClubRepoMock(map: scala.collection.mutable.Map[FootballClubId, Foo UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FootballClubRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala index a12b10174e..1d52df440e 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepo.scala @@ -30,4 +30,7 @@ trait MaritalStatusRepo { def selectByIdsTracked(ids: Array[MaritalStatusId]): ZIO[ZConnection, Throwable, Map[MaritalStatusId, MaritalStatusRow]] def update: UpdateBuilder[MaritalStatusFields, MaritalStatusRow] def upsert(unsaved: MaritalStatusRow): ZIO[ZConnection, Throwable, UpdateResult[MaritalStatusRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala index effe6fc4f4..1801585d43 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoImpl.scala @@ -78,6 +78,19 @@ class MaritalStatusRepoImpl extends MaritalStatusRepo { ${Segment.paramSegment(unsaved.id)(MaritalStatusId.setter)}::int8 ) on conflict ("id") + do nothing returning "id"""".insertReturning(using MaritalStatusRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table marital_status_TEMP (like myschema.marital_status) on commit drop".execute + val copied = streamingInsert(s"""copy marital_status_TEMP("id") from stdin""", batchSize, unsaved)(MaritalStatusRow.text) + val merged = sql"""insert into myschema.marital_status("id") + select * from marital_status_TEMP + on conflict ("id") + do nothing + ; + drop table marital_status_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala index c87c7a6b21..c473f43f07 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/marital_status/MaritalStatusRepoMock.scala @@ -87,4 +87,13 @@ class MaritalStatusRepoMock(map: scala.collection.mutable.Map[MaritalStatusId, M UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, MaritalStatusRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala index 7eb24cee51..db8b93dac1 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepo.scala @@ -35,4 +35,7 @@ trait PersonRepo { def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def updateFieldValues(id: PersonId, fieldValues: List[PersonFieldValue[?]]): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala index 47ce473aa9..ede74147b9 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoImpl.scala @@ -200,4 +200,27 @@ class PersonRepoImpl extends PersonRepo { "favorite_number" = EXCLUDED."favorite_number" returning "id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number"""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like myschema.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into myschema.person("id", "favourite_football_club_id", "name", "nick_name", "blog_url", "email", "phone", "likes_pizza", "marital_status_id", "work_email", "sector", "favorite_number") + select * from person_TEMP + on conflict ("id") + do update set + "favourite_football_club_id" = EXCLUDED."favourite_football_club_id", + "name" = EXCLUDED."name", + "nick_name" = EXCLUDED."nick_name", + "blog_url" = EXCLUDED."blog_url", + "email" = EXCLUDED."email", + "phone" = EXCLUDED."phone", + "likes_pizza" = EXCLUDED."likes_pizza", + "marital_status_id" = EXCLUDED."marital_status_id", + "work_email" = EXCLUDED."work_email", + "sector" = EXCLUDED."sector", + "favorite_number" = EXCLUDED."favorite_number" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala index 880b46fefa..cdd0e78296 100644 --- a/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala +++ b/.bleep/generated-sources/typo-tester-zio-jdbc@jvm3/scripts.GenHardcodedFiles/testdb/hardcoded/myschema/person/PersonRepoMock.scala @@ -150,4 +150,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.id -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/site-in/other-features/testing-with-stubs.md b/site-in/other-features/testing-with-stubs.md index cd97ab5642..86f6df69df 100644 --- a/site-in/other-features/testing-with-stubs.md +++ b/site-in/other-features/testing-with-stubs.md @@ -107,6 +107,18 @@ class AddressRepoMock(toRow: Function1[AddressRowUnsaved, AddressRow], map.put(unsaved.addressid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[AddressRow])(implicit c: Connection): List[AddressRow] = { + unsaved.map { row => + map += (row.addressid -> row) + row + }.toList + } + override def upsertStreaming(unsaved: Iterator[AddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.addressid -> row) + } + unsaved.size + } } ``` diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala index 54d667ecb9..e55859678f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala @@ -29,4 +29,7 @@ trait DepartmentRepo { def update: UpdateBuilder[DepartmentFields, DepartmentRow] def update(row: DepartmentRow)(implicit c: Connection): Boolean def upsert(unsaved: DepartmentRow)(implicit c: Connection): DepartmentRow + def upsertBatch(unsaved: Iterable[DepartmentRow])(implicit c: Connection): List[DepartmentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[DepartmentRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala index 38574bf288..269f9f2fff 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala @@ -10,6 +10,7 @@ package department import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -135,4 +137,45 @@ class DepartmentRepoImpl extends DepartmentRepo { .executeInsert(DepartmentRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[DepartmentRow])(implicit c: Connection): List[DepartmentRow] = { + def toNamedParameter(row: DepartmentRow): List[NamedParameter] = List( + NamedParameter("departmentid", ParameterValue(row.departmentid, null, DepartmentId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("groupname", ParameterValue(row.groupname, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + values ({departmentid}::int4, {name}::varchar, {groupname}::varchar, {modifieddate}::timestamp) + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + returning "departmentid", "name", "groupname", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(DepartmentRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[DepartmentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table department_TEMP (like humanresources.department) on commit drop".execute(): @nowarn + streamingInsert(s"""copy department_TEMP("departmentid", "name", "groupname", "modifieddate") from stdin""", batchSize, unsaved)(DepartmentRow.text, c): @nowarn + SQL"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + select * from department_TEMP + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table department_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala index f65fb7948d..d663ba0a1b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala @@ -87,4 +87,17 @@ class DepartmentRepoMock(toRow: Function1[DepartmentRowUnsaved, DepartmentRow], map.put(unsaved.departmentid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[DepartmentRow])(implicit c: Connection): List[DepartmentRow] = { + unsaved.map { row => + map += (row.departmentid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[DepartmentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.departmentid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala index 70387571de..e9ff53ff60 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala @@ -30,4 +30,7 @@ trait EmployeeRepo { def update: UpdateBuilder[EmployeeFields, EmployeeRow] def update(row: EmployeeRow)(implicit c: Connection): Boolean def upsert(unsaved: EmployeeRow)(implicit c: Connection): EmployeeRow + def upsertBatch(unsaved: Iterable[EmployeeRow])(implicit c: Connection): List[EmployeeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[EmployeeRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala index 50dcfdc9ad..f18bc40e9e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Flag +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -23,6 +24,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -200,4 +202,78 @@ class EmployeeRepoImpl extends EmployeeRepo { .executeInsert(EmployeeRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[EmployeeRow])(implicit c: Connection): List[EmployeeRow] = { + def toNamedParameter(row: EmployeeRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("nationalidnumber", ParameterValue(row.nationalidnumber, null, ToStatement.stringToStatement)), + NamedParameter("loginid", ParameterValue(row.loginid, null, ToStatement.stringToStatement)), + NamedParameter("jobtitle", ParameterValue(row.jobtitle, null, ToStatement.stringToStatement)), + NamedParameter("birthdate", ParameterValue(row.birthdate, null, TypoLocalDate.toStatement)), + NamedParameter("maritalstatus", ParameterValue(row.maritalstatus, null, ToStatement.stringToStatement)), + NamedParameter("gender", ParameterValue(row.gender, null, ToStatement.stringToStatement)), + NamedParameter("hiredate", ParameterValue(row.hiredate, null, TypoLocalDate.toStatement)), + NamedParameter("salariedflag", ParameterValue(row.salariedflag, null, Flag.toStatement)), + NamedParameter("vacationhours", ParameterValue(row.vacationhours, null, TypoShort.toStatement)), + NamedParameter("sickleavehours", ParameterValue(row.sickleavehours, null, TypoShort.toStatement)), + NamedParameter("currentflag", ParameterValue(row.currentflag, null, Flag.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("organizationnode", ParameterValue(row.organizationnode, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + values ({businessentityid}::int4, {nationalidnumber}, {loginid}, {jobtitle}, {birthdate}::date, 
{maritalstatus}::bpchar, {gender}::bpchar, {hiredate}::date, {salariedflag}::bool, {vacationhours}::int2, {sickleavehours}::int2, {currentflag}::bool, {rowguid}::uuid, {modifieddate}::timestamp, {organizationnode}) + on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + returning "businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate"::text, "maritalstatus", "gender", "hiredate"::text, "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate"::text, "organizationnode" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(EmployeeRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table employee_TEMP (like humanresources.employee) on commit drop".execute(): @nowarn + streamingInsert(s"""copy employee_TEMP("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") from stdin""", batchSize, unsaved)(EmployeeRow.text, c): @nowarn + SQL"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + select * from employee_TEMP + on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + ; + drop table employee_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala index 48f677140d..85e6fda6f8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala @@ -88,4 +88,17 @@ class EmployeeRepoMock(toRow: Function1[EmployeeRowUnsaved, EmployeeRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def 
upsertBatch(unsaved: Iterable[EmployeeRow])(implicit c: Connection): List[EmployeeRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala index ebe64177b9..28efa91883 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala @@ -29,4 +29,7 @@ trait EmployeedepartmenthistoryRepo { def update: UpdateBuilder[EmployeedepartmenthistoryFields, EmployeedepartmenthistoryRow] def update(row: EmployeedepartmenthistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: EmployeedepartmenthistoryRow)(implicit c: Connection): EmployeedepartmenthistoryRow + def upsertBatch(unsaved: Iterable[EmployeedepartmenthistoryRow])(implicit c: Connection): List[EmployeedepartmenthistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[EmployeedepartmenthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala index 1a5e87af5a..8d769c1d24 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.humanresources.department.DepartmentId import adventureworks.humanresources.shift.ShiftId import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -148,4 +150,45 @@ class EmployeedepartmenthistoryRepoImpl extends EmployeedepartmenthistoryRepo { .executeInsert(EmployeedepartmenthistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[EmployeedepartmenthistoryRow])(implicit c: Connection): List[EmployeedepartmenthistoryRow] = { + def toNamedParameter(row: EmployeedepartmenthistoryRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("departmentid", 
ParameterValue(row.departmentid, null, DepartmentId.toStatement)), + NamedParameter("shiftid", ParameterValue(row.shiftid, null, ShiftId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDate.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDate.toStatement, TypoLocalDate.parameterMetadata))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + values ({businessentityid}::int4, {departmentid}::int2, {shiftid}::int2, {startdate}::date, {enddate}::date, {modifieddate}::timestamp) + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "departmentid", "shiftid", "startdate"::text, "enddate"::text, "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(EmployeedepartmenthistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeedepartmenthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table employeedepartmenthistory_TEMP (like humanresources.employeedepartmenthistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy employeedepartmenthistory_TEMP("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") from stdin""", batchSize, unsaved)(EmployeedepartmenthistoryRow.text, c): @nowarn + SQL"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + select * from employeedepartmenthistory_TEMP + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeedepartmenthistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala index ed2c820247..18aecce701 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala @@ -87,4 +87,17 @@ class EmployeedepartmenthistoryRepoMock(toRow: Function1[Employeedepartmenthisto map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[EmployeedepartmenthistoryRow])(implicit c: Connection): List[EmployeedepartmenthistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeedepartmenthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala index d3e9a2dc48..3c4d2d46a7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala @@ -29,4 +29,7 @@ trait EmployeepayhistoryRepo { def update: UpdateBuilder[EmployeepayhistoryFields, EmployeepayhistoryRow] def update(row: EmployeepayhistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: EmployeepayhistoryRow)(implicit c: Connection): EmployeepayhistoryRow + def upsertBatch(unsaved: Iterable[EmployeepayhistoryRow])(implicit c: Connection): List[EmployeepayhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[EmployeepayhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala index fddf76313a..95f5644423 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -142,4 +144,46 @@ class EmployeepayhistoryRepoImpl extends EmployeepayhistoryRepo { .executeInsert(EmployeepayhistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[EmployeepayhistoryRow])(implicit c: Connection): List[EmployeepayhistoryRow] = { + def toNamedParameter(row: EmployeepayhistoryRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("ratechangedate", ParameterValue(row.ratechangedate, null, TypoLocalDateTime.toStatement)), + NamedParameter("rate", ParameterValue(row.rate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("payfrequency", ParameterValue(row.payfrequency, null, TypoShort.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + 
BatchSql( + s"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + values ({businessentityid}::int4, {ratechangedate}::timestamp, {rate}::numeric, {payfrequency}::int2, {modifieddate}::timestamp) + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "ratechangedate"::text, "rate", "payfrequency", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(EmployeepayhistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeepayhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table employeepayhistory_TEMP (like humanresources.employeepayhistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy employeepayhistory_TEMP("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") from stdin""", batchSize, unsaved)(EmployeepayhistoryRow.text, c): @nowarn + SQL"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + select * from employeepayhistory_TEMP + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeepayhistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala index 8aa642fb2b..82a22249db 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala @@ -87,4 +87,17 @@ class EmployeepayhistoryRepoMock(toRow: Function1[EmployeepayhistoryRowUnsaved, map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[EmployeepayhistoryRow])(implicit c: Connection): List[EmployeepayhistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmployeepayhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala index bdb9369575..5df163c7ae 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala @@ -29,4 +29,7 @@ trait JobcandidateRepo { def update: UpdateBuilder[JobcandidateFields, JobcandidateRow] def update(row: JobcandidateRow)(implicit c: Connection): Boolean def upsert(unsaved: JobcandidateRow)(implicit c: Connection): JobcandidateRow + def upsertBatch(unsaved: Iterable[JobcandidateRow])(implicit c: Connection): List[JobcandidateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[JobcandidateRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala index 0b8b561970..5b56a7a659 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoXml import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -137,4 +139,45 @@ class JobcandidateRepoImpl extends JobcandidateRepo { .executeInsert(JobcandidateRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[JobcandidateRow])(implicit c: Connection): List[JobcandidateRow] = { + def toNamedParameter(row: JobcandidateRow): List[NamedParameter] = List( + NamedParameter("jobcandidateid", ParameterValue(row.jobcandidateid, null, JobcandidateId.toStatement)), + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("resume", ParameterValue(row.resume, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + values ({jobcandidateid}::int4, {businessentityid}::int4, 
{resume}::xml, {modifieddate}::timestamp) + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + returning "jobcandidateid", "businessentityid", "resume", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(JobcandidateRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[JobcandidateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table jobcandidate_TEMP (like humanresources.jobcandidate) on commit drop".execute(): @nowarn + streamingInsert(s"""copy jobcandidate_TEMP("jobcandidateid", "businessentityid", "resume", "modifieddate") from stdin""", batchSize, unsaved)(JobcandidateRow.text, c): @nowarn + SQL"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + select * from jobcandidate_TEMP + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table jobcandidate_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala index e3a4c4226a..e5f65c7b83 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala @@ -87,4 +87,17 @@ class JobcandidateRepoMock(toRow: Function1[JobcandidateRowUnsaved, Jobcandidate map.put(unsaved.jobcandidateid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[JobcandidateRow])(implicit c: Connection): List[JobcandidateRow] = { + unsaved.map { row => + map += (row.jobcandidateid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[JobcandidateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.jobcandidateid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala index e3a5bb1ba8..fa27a24ebf 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala @@ -29,4 +29,7 @@ trait ShiftRepo { def update: UpdateBuilder[ShiftFields, ShiftRow] def update(row: ShiftRow)(implicit c: Connection): Boolean def upsert(unsaved: ShiftRow)(implicit c: Connection): ShiftRow + def upsertBatch(unsaved: Iterable[ShiftRow])(implicit c: Connection): List[ShiftRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ShiftRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala index e37a6cb66e..5d48c68c73 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoLocalTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -140,4 +142,48 @@ class ShiftRepoImpl extends ShiftRepo { .executeInsert(ShiftRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ShiftRow])(implicit c: Connection): List[ShiftRow] = { + def toNamedParameter(row: ShiftRow): List[NamedParameter] = List( + NamedParameter("shiftid", ParameterValue(row.shiftid, null, ShiftId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("starttime", ParameterValue(row.starttime, null, TypoLocalTime.toStatement)), + NamedParameter("endtime", ParameterValue(row.endtime, null, TypoLocalTime.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + values ({shiftid}::int4, {name}::varchar, {starttime}::time, {endtime}::time, {modifieddate}::timestamp) + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + returning "shiftid", "name", "starttime"::text, "endtime"::text, "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ShiftRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShiftRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table shift_TEMP (like humanresources.shift) on commit drop".execute(): @nowarn + streamingInsert(s"""copy shift_TEMP("shiftid", "name", "starttime", "endtime", "modifieddate") from stdin""", batchSize, unsaved)(ShiftRow.text, c): @nowarn + SQL"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + select * from shift_TEMP + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shift_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala index 851844c035..9b2ed3bcf4 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala @@ -87,4 +87,17 @@ class ShiftRepoMock(toRow: Function1[ShiftRowUnsaved, ShiftRow], map.put(unsaved.shiftid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ShiftRow])(implicit c: Connection): List[ShiftRow] = { + unsaved.map { row => + map += (row.shiftid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShiftRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.shiftid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala new file mode 100644 index 0000000000..dfb29023af --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.cardinal_number` + * Constraint: CHECK ((VALUE >= 0)) + */ +case class CardinalNumber(value: Int) +object CardinalNumber { + implicit lazy val arrayColumn: Column[Array[CardinalNumber]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[CardinalNumber]] = adventureworks.IntArrayToStatement.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CardinalNumber, Int] = Bijection[CardinalNumber, Int](_.value)(CardinalNumber.apply) + implicit lazy val column: Column[CardinalNumber] = Column.columnToInt.map(CardinalNumber.apply) + implicit lazy val ordering: Ordering[CardinalNumber] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[CardinalNumber] = new ParameterMetaData[CardinalNumber] { + override def sqlType: String = """"information_schema"."cardinal_number"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[CardinalNumber] = Reads.IntReads.map(CardinalNumber.apply) + implicit lazy val text: Text[CardinalNumber] = new Text[CardinalNumber] { + override def unsafeEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[CardinalNumber] = ToStatement.intToStatement.contramap(_.value) + implicit lazy val writes: Writes[CardinalNumber] = Writes.IntWrites.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala new file mode 100644 index 0000000000..2edda18808 --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.character_data` + * No constraint + */ +case class CharacterData(value: String) +object CharacterData { + implicit lazy val arrayColumn: Column[Array[CharacterData]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[CharacterData]] = ToStatement.arrayToParameter(ParameterMetaData.StringParameterMetaData).contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CharacterData, String] = Bijection[CharacterData, String](_.value)(CharacterData.apply) + implicit lazy val column: Column[CharacterData] = Column.columnToString.map(CharacterData.apply) + implicit lazy val ordering: Ordering[CharacterData] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[CharacterData] = new ParameterMetaData[CharacterData] { + override def sqlType: String = """"information_schema"."character_data"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[CharacterData] = Reads.StringReads.map(CharacterData.apply) + implicit lazy val text: Text[CharacterData] = new Text[CharacterData] { + override def unsafeEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[CharacterData] = ToStatement.stringToStatement.contramap(_.value) + implicit lazy val writes: Writes[CharacterData] = Writes.StringWrites.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala new file mode 100644 index 0000000000..9fb1d7b0af --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.sql_identifier` + * No constraint + */ +case class SqlIdentifier(value: String) +object SqlIdentifier { + implicit lazy val arrayColumn: Column[Array[SqlIdentifier]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[SqlIdentifier]] = ToStatement.arrayToParameter(ParameterMetaData.StringParameterMetaData).contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[SqlIdentifier, String] = Bijection[SqlIdentifier, String](_.value)(SqlIdentifier.apply) + implicit lazy val column: Column[SqlIdentifier] = Column.columnToString.map(SqlIdentifier.apply) + implicit lazy val ordering: Ordering[SqlIdentifier] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[SqlIdentifier] = new ParameterMetaData[SqlIdentifier] { + override def sqlType: String = """"information_schema"."sql_identifier"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[SqlIdentifier] = Reads.StringReads.map(SqlIdentifier.apply) + implicit lazy val text: Text[SqlIdentifier] = new Text[SqlIdentifier] { + override def unsafeEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[SqlIdentifier] = ToStatement.stringToStatement.contramap(_.value) + implicit lazy val writes: Writes[SqlIdentifier] = Writes.StringWrites.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala new file mode 100644 index 0000000000..445ce7267a --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala @@ -0,0 +1,39 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import adventureworks.customtypes.TypoInstant +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.time_stamp` + * No constraint + */ +case class TimeStamp(value: TypoInstant) +object TimeStamp { + implicit lazy val arrayColumn: Column[Array[TimeStamp]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[TimeStamp]] = TypoInstant.arrayToStatement.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[TimeStamp, TypoInstant] = Bijection[TimeStamp, TypoInstant](_.value)(TimeStamp.apply) + implicit lazy val column: Column[TimeStamp] = TypoInstant.column.map(TimeStamp.apply) + implicit def ordering(implicit O0: Ordering[TypoInstant]): Ordering[TimeStamp] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[TimeStamp] = new ParameterMetaData[TimeStamp] { + override def sqlType: String = """"information_schema"."time_stamp"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[TimeStamp] = TypoInstant.reads.map(TimeStamp.apply) + implicit lazy val text: Text[TimeStamp] = new Text[TimeStamp] { + override def unsafeEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[TimeStamp] = TypoInstant.toStatement.contramap(_.value) + implicit lazy val writes: Writes[TimeStamp] = TypoInstant.writes.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala new file mode 100644 index 0000000000..9e60bb7c0e --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import anorm.Column +import anorm.ParameterMetaData +import anorm.ToStatement +import java.sql.Types +import play.api.libs.json.Reads +import play.api.libs.json.Writes +import typo.dsl.Bijection + +/** Domain `information_schema.yes_or_no` + * Constraint: CHECK (((VALUE)::text = ANY ((ARRAY['YES'::character varying, 'NO'::character varying])::text[]))) + */ +case class YesOrNo(value: String) +object YesOrNo { + implicit lazy val arrayColumn: Column[Array[YesOrNo]] = Column.columnToArray(column, implicitly) + implicit lazy val arrayToStatement: ToStatement[Array[YesOrNo]] = ToStatement.arrayToParameter(ParameterMetaData.StringParameterMetaData).contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[YesOrNo, String] = Bijection[YesOrNo, String](_.value)(YesOrNo.apply) + implicit lazy val column: Column[YesOrNo] = Column.columnToString.map(YesOrNo.apply) + implicit lazy val ordering: Ordering[YesOrNo] = Ordering.by(_.value) + implicit lazy val parameterMetadata: ParameterMetaData[YesOrNo] = new ParameterMetaData[YesOrNo] { + override def sqlType: String = """"information_schema"."yes_or_no"""" + override def jdbcType: Int = Types.OTHER + } + implicit lazy val reads: Reads[YesOrNo] = Reads.StringReads.map(YesOrNo.apply) + implicit lazy val text: Text[YesOrNo] = new Text[YesOrNo] { + override def unsafeEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } + implicit lazy val toStatement: ToStatement[YesOrNo] = ToStatement.stringToStatement.contramap(_.value) + implicit lazy val writes: Writes[YesOrNo] = Writes.StringWrites.contramap(_.value) +} \ No newline at end of file diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala index d6a3f76afa..f31e96c501 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala @@ -29,4 +29,7 @@ trait AddressRepo { def update: UpdateBuilder[AddressFields, AddressRow] def update(row: AddressRow)(implicit c: Connection): Boolean def upsert(unsaved: AddressRow)(implicit c: Connection): AddressRow + def upsertBatch(unsaved: Iterable[AddressRow])(implicit c: Connection): List[AddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[AddressRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala index e4e757127d..49ae20e01b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoBytea import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.stateprovince.StateprovinceId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -162,4 +164,60 @@ class AddressRepoImpl extends AddressRepo { .executeInsert(AddressRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[AddressRow])(implicit c: Connection): List[AddressRow] = { + def toNamedParameter(row: AddressRow): List[NamedParameter] = List( + NamedParameter("addressid", ParameterValue(row.addressid, null, AddressId.toStatement)), + NamedParameter("addressline1", ParameterValue(row.addressline1, null, ToStatement.stringToStatement)), + NamedParameter("addressline2", ParameterValue(row.addressline2, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("city", ParameterValue(row.city, null, ToStatement.stringToStatement)), + NamedParameter("stateprovinceid", ParameterValue(row.stateprovinceid, null, StateprovinceId.toStatement)), + NamedParameter("postalcode", ParameterValue(row.postalcode, null, ToStatement.stringToStatement)), + NamedParameter("spatiallocation", ParameterValue(row.spatiallocation, null, ToStatement.optionToStatement(TypoBytea.toStatement, TypoBytea.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + values ({addressid}::int4, {addressline1}, {addressline2}, {city}, {stateprovinceid}::int4, {postalcode}, {spatiallocation}::bytea, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + 
).executeReturning(AddressRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[AddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table address_TEMP (like person.address) on commit drop".execute(): @nowarn + streamingInsert(s"""copy address_TEMP("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(AddressRow.text, c): @nowarn + SQL"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + select * from address_TEMP + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table address_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala index 087842773c..b6d6823ae0 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala @@ -87,4 +87,17 @@ class AddressRepoMock(toRow: Function1[AddressRowUnsaved, AddressRow], map.put(unsaved.addressid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[AddressRow])(implicit c: Connection): List[AddressRow] = { + unsaved.map { row => + map += (row.addressid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[AddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.addressid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala index b2bd59f5c2..226bcb2aeb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala @@ -29,4 +29,7 @@ trait AddresstypeRepo { def update: UpdateBuilder[AddresstypeFields, AddresstypeRow] def update(row: AddresstypeRow)(implicit c: Connection): Boolean def upsert(unsaved: AddresstypeRow)(implicit c: Connection): AddresstypeRow + def upsertBatch(unsaved: Iterable[AddresstypeRow])(implicit c: Connection): List[AddresstypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[AddresstypeRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala index 92550a8042..4a45c73e1a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -139,4 +141,45 @@ class AddresstypeRepoImpl extends AddresstypeRepo { .executeInsert(AddresstypeRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[AddresstypeRow])(implicit c: Connection): List[AddresstypeRow] = { + def toNamedParameter(row: AddresstypeRow): List[NamedParameter] = List( + NamedParameter("addresstypeid", ParameterValue(row.addresstypeid, null, AddresstypeId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + values ({addresstypeid}::int4, {name}::varchar, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "addresstypeid", "name", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(AddresstypeRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[AddresstypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table addresstype_TEMP (like person.addresstype) on commit drop".execute(): @nowarn + streamingInsert(s"""copy addresstype_TEMP("addresstypeid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(AddresstypeRow.text, c): @nowarn + SQL"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + select * from addresstype_TEMP + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table addresstype_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala index 9224317679..1b15838bca 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala @@ -87,4 +87,17 @@ class AddresstypeRepoMock(toRow: Function1[AddresstypeRowUnsaved, AddresstypeRow map.put(unsaved.addresstypeid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[AddresstypeRow])(implicit c: Connection): List[AddresstypeRow] = { + unsaved.map { row => + map += (row.addresstypeid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[AddresstypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.addresstypeid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala index 655e8b3210..b77437aac1 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala @@ -29,4 +29,7 @@ trait BusinessentityRepo { def update: UpdateBuilder[BusinessentityFields, BusinessentityRow] def update(row: BusinessentityRow)(implicit c: Connection): Boolean def upsert(unsaved: BusinessentityRow)(implicit c: Connection): BusinessentityRow + def upsertBatch(unsaved: Iterable[BusinessentityRow])(implicit c: Connection): List[BusinessentityRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[BusinessentityRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala index bc51a38ed7..05ff387578 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala @@ -10,6 +10,7 @@ package businessentity import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -134,4 +136,42 @@ class BusinessentityRepoImpl extends BusinessentityRepo { .executeInsert(BusinessentityRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[BusinessentityRow])(implicit c: Connection): List[BusinessentityRow] = { + def toNamedParameter(row: BusinessentityRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + values ({businessentityid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(BusinessentityRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentityRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table businessentity_TEMP (like person.businessentity) on commit drop".execute(): @nowarn + streamingInsert(s"""copy businessentity_TEMP("businessentityid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentityRow.text, c): @nowarn + SQL"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + select * from businessentity_TEMP + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentity_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala index 17cc0fa777..8286a0f736 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala @@ -87,4 +87,17 @@ class BusinessentityRepoMock(toRow: Function1[BusinessentityRowUnsaved, Business map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[BusinessentityRow])(implicit c: Connection): List[BusinessentityRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentityRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala index 85611f4d33..6a659700b4 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala @@ -29,4 +29,7 @@ trait BusinessentityaddressRepo { def update: UpdateBuilder[BusinessentityaddressFields, BusinessentityaddressRow] def update(row: BusinessentityaddressRow)(implicit c: Connection): Boolean def upsert(unsaved: BusinessentityaddressRow)(implicit c: Connection): BusinessentityaddressRow + def upsertBatch(unsaved: Iterable[BusinessentityaddressRow])(implicit c: Connection): List[BusinessentityaddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[BusinessentityaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala index c1468c00a4..b3a0d25316 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import adventureworks.person.address.AddressId import adventureworks.person.addresstype.AddresstypeId import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -146,4 +148,44 @@ class BusinessentityaddressRepoImpl extends BusinessentityaddressRepo { .executeInsert(BusinessentityaddressRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[BusinessentityaddressRow])(implicit c: Connection): List[BusinessentityaddressRow] = { + def toNamedParameter(row: BusinessentityaddressRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("addressid", ParameterValue(row.addressid, null, AddressId.toStatement)), + NamedParameter("addresstypeid", ParameterValue(row.addresstypeid, null, AddresstypeId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + values ({businessentityid}::int4, {addressid}::int4, {addresstypeid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(BusinessentityaddressRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentityaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table businessentityaddress_TEMP (like person.businessentityaddress) on commit drop".execute(): @nowarn + streamingInsert(s"""copy businessentityaddress_TEMP("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentityaddressRow.text, c): @nowarn + SQL"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + select * from businessentityaddress_TEMP + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentityaddress_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala index 7d9a336681..b80a3e4fe7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala @@ -87,4 +87,17 @@ class BusinessentityaddressRepoMock(toRow: Function1[BusinessentityaddressRowUns map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[BusinessentityaddressRow])(implicit c: Connection): List[BusinessentityaddressRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentityaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala index 3d311c8717..38734e4d78 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala @@ -29,4 +29,7 @@ trait BusinessentitycontactRepo { def update: UpdateBuilder[BusinessentitycontactFields, BusinessentitycontactRow] def update(row: BusinessentitycontactRow)(implicit c: Connection): Boolean def upsert(unsaved: BusinessentitycontactRow)(implicit c: Connection): BusinessentitycontactRow + def upsertBatch(unsaved: Iterable[BusinessentitycontactRow])(implicit c: Connection): List[BusinessentitycontactRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[BusinessentitycontactRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala index 0952223e1c..8b449c6e8a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.person.contacttype.ContacttypeId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -145,4 +147,44 @@ class BusinessentitycontactRepoImpl extends BusinessentitycontactRepo { .executeInsert(BusinessentitycontactRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[BusinessentitycontactRow])(implicit c: Connection): List[BusinessentitycontactRow] = { + def toNamedParameter(row: BusinessentitycontactRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("personid", ParameterValue(row.personid, null, BusinessentityId.toStatement)), + NamedParameter("contacttypeid", ParameterValue(row.contacttypeid, null, ContacttypeId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + values ({businessentityid}::int4, {personid}::int4, {contacttypeid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(BusinessentitycontactRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentitycontactRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table businessentitycontact_TEMP (like person.businessentitycontact) on commit drop".execute(): @nowarn + streamingInsert(s"""copy businessentitycontact_TEMP("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentitycontactRow.text, c): @nowarn + SQL"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + select * from businessentitycontact_TEMP + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentitycontact_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala index ca0c746983..c8611d5d48 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala @@ -87,4 +87,17 @@ class BusinessentitycontactRepoMock(toRow: Function1[BusinessentitycontactRowUns map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[BusinessentitycontactRow])(implicit c: Connection): List[BusinessentitycontactRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BusinessentitycontactRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala index 69c219b04e..8feeadef9a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala @@ -29,4 +29,7 @@ trait ContacttypeRepo { def update: UpdateBuilder[ContacttypeFields, ContacttypeRow] def update(row: ContacttypeRow)(implicit c: Connection): Boolean def upsert(unsaved: ContacttypeRow)(implicit c: Connection): ContacttypeRow + def upsertBatch(unsaved: Iterable[ContacttypeRow])(implicit c: Connection): List[ContacttypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ContacttypeRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala index c416212e50..b60426ac64 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala @@ -10,6 +10,7 @@ package contacttype import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -131,4 +133,42 @@ class ContacttypeRepoImpl extends ContacttypeRepo { .executeInsert(ContacttypeRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ContacttypeRow])(implicit c: Connection): List[ContacttypeRow] = { + def toNamedParameter(row: ContacttypeRow): List[NamedParameter] = List( + NamedParameter("contacttypeid", ParameterValue(row.contacttypeid, null, ContacttypeId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + values ({contacttypeid}::int4, {name}::varchar, {modifieddate}::timestamp) + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "contacttypeid", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ContacttypeRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ContacttypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table contacttype_TEMP (like person.contacttype) on commit drop".execute(): @nowarn + streamingInsert(s"""copy contacttype_TEMP("contacttypeid", "name", "modifieddate") from stdin""", batchSize, unsaved)(ContacttypeRow.text, c): @nowarn + SQL"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + select * from contacttype_TEMP + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table contacttype_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala index 165c5bdfa1..bc9949089c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala @@ -87,4 +87,17 @@ class ContacttypeRepoMock(toRow: Function1[ContacttypeRowUnsaved, ContacttypeRow map.put(unsaved.contacttypeid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ContacttypeRow])(implicit c: Connection): List[ContacttypeRow] = { + unsaved.map { row => + map += (row.contacttypeid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ContacttypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.contacttypeid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala index 3d2aebce3a..42d88560fc 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala @@ -29,4 +29,7 @@ trait CountryregionRepo { def update: UpdateBuilder[CountryregionFields, CountryregionRow] def update(row: CountryregionRow)(implicit c: Connection): Boolean def upsert(unsaved: CountryregionRow)(implicit c: Connection): CountryregionRow + def upsertBatch(unsaved: Iterable[CountryregionRow])(implicit c: Connection): List[CountryregionRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CountryregionRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala index 27182db4b0..e09001c831 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala @@ -10,6 +10,7 @@ package countryregion import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -128,4 +130,42 @@ class CountryregionRepoImpl extends CountryregionRepo { .executeInsert(CountryregionRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CountryregionRow])(implicit c: Connection): List[CountryregionRow] = { + def toNamedParameter(row: CountryregionRow): List[NamedParameter] = List( + NamedParameter("countryregioncode", ParameterValue(row.countryregioncode, null, CountryregionId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + values ({countryregioncode}, {name}::varchar, {modifieddate}::timestamp) + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "countryregioncode", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CountryregionRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CountryregionRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table countryregion_TEMP (like person.countryregion) on commit drop".execute(): @nowarn + streamingInsert(s"""copy countryregion_TEMP("countryregioncode", "name", "modifieddate") from stdin""", batchSize, unsaved)(CountryregionRow.text, c): @nowarn + SQL"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + select * from countryregion_TEMP + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregion_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala index ac8b613740..0be599f482 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala @@ -87,4 +87,17 @@ class CountryregionRepoMock(toRow: Function1[CountryregionRowUnsaved, Countryreg map.put(unsaved.countryregioncode, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CountryregionRow])(implicit c: Connection): List[CountryregionRow] = { + unsaved.map { row => + map += (row.countryregioncode -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CountryregionRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.countryregioncode -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala index 4c4282b63a..1cdd9bf299 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala @@ -29,4 +29,7 @@ trait EmailaddressRepo { def update: UpdateBuilder[EmailaddressFields, EmailaddressRow] def update(row: EmailaddressRow)(implicit c: Connection): Boolean def upsert(unsaved: EmailaddressRow)(implicit c: Connection): EmailaddressRow + def upsertBatch(unsaved: Iterable[EmailaddressRow])(implicit c: Connection): List[EmailaddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[EmailaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala index 4a83773fbf..d1cf9f628d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -149,4 +151,46 @@ class EmailaddressRepoImpl extends EmailaddressRepo { .executeInsert(EmailaddressRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[EmailaddressRow])(implicit c: Connection): List[EmailaddressRow] = { + def toNamedParameter(row: EmailaddressRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("emailaddressid", ParameterValue(row.emailaddressid, null, ToStatement.intToStatement)), + NamedParameter("emailaddress", ParameterValue(row.emailaddress, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + values ({businessentityid}::int4, {emailaddressid}::int4, {emailaddress}, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(EmailaddressRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmailaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table emailaddress_TEMP (like person.emailaddress) on commit drop".execute(): @nowarn + streamingInsert(s"""copy emailaddress_TEMP("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(EmailaddressRow.text, c): @nowarn + SQL"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + select * from emailaddress_TEMP + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table emailaddress_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala index 90f6fa0e6e..75b2f9a0b9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala @@ -87,4 +87,17 @@ class EmailaddressRepoMock(toRow: Function1[EmailaddressRowUnsaved, Emailaddress map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[EmailaddressRow])(implicit c: Connection): List[EmailaddressRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[EmailaddressRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala index bedde72dc6..23708ec438 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala @@ -30,4 +30,7 @@ trait PasswordRepo { def update: UpdateBuilder[PasswordFields, PasswordRow] def update(row: PasswordRow)(implicit c: Connection): Boolean def upsert(unsaved: PasswordRow)(implicit c: Connection): PasswordRow + def upsertBatch(unsaved: Iterable[PasswordRow])(implicit c: Connection): List[PasswordRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PasswordRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala index e5a1c45df6..b7f7bd72b7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,48 @@ class PasswordRepoImpl extends PasswordRepo { .executeInsert(PasswordRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PasswordRow])(implicit c: Connection): List[PasswordRow] = { + def toNamedParameter(row: PasswordRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("passwordhash", ParameterValue(row.passwordhash, null, ToStatement.stringToStatement)), + NamedParameter("passwordsalt", ParameterValue(row.passwordsalt, null, ToStatement.stringToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + values ({businessentityid}::int4, {passwordhash}, {passwordsalt}, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PasswordRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PasswordRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table password_TEMP (like person.password) on commit drop".execute(): @nowarn + streamingInsert(s"""copy password_TEMP("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(PasswordRow.text, c): @nowarn + SQL"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + select * from password_TEMP + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table password_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala index 0a5634f39b..c4a75b3b10 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala @@ -88,4 +88,17 @@ class PasswordRepoMock(toRow: Function1[PasswordRowUnsaved, PasswordRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PasswordRow])(implicit c: Connection): List[PasswordRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PasswordRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala index 9dbec66538..cb6c61d729 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala @@ -30,4 +30,7 @@ trait PersonRepo { def update: UpdateBuilder[PersonFields, PersonRow] def update(row: PersonRow)(implicit c: Connection): Boolean def upsert(unsaved: PersonRow)(implicit c: Connection): PersonRow + def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala index 940ca52253..391584271f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala @@ -15,6 +15,7 @@ import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Name import adventureworks.public.NameStyle import adventureworks.userdefined.FirstName +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -24,6 +25,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -184,4 +186,72 @@ class PersonRepoImpl extends PersonRepo { .executeInsert(PersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + def toNamedParameter(row: PersonRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("persontype", ParameterValue(row.persontype, null, ToStatement.stringToStatement)), + NamedParameter("namestyle", ParameterValue(row.namestyle, null, NameStyle.toStatement)), + NamedParameter("title", ParameterValue(row.title, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("firstname", ParameterValue(row.firstname, null, /* user-picked */ FirstName.toStatement)), + NamedParameter("middlename", ParameterValue(row.middlename, null, ToStatement.optionToStatement(Name.toStatement, Name.parameterMetadata))), + NamedParameter("lastname", ParameterValue(row.lastname, null, Name.toStatement)), + NamedParameter("suffix", ParameterValue(row.suffix, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("emailpromotion", ParameterValue(row.emailpromotion, null, ToStatement.intToStatement)), + NamedParameter("additionalcontactinfo", ParameterValue(row.additionalcontactinfo, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("demographics", ParameterValue(row.demographics, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + values ({businessentityid}::int4, {persontype}::bpchar, {namestyle}::bool, {title}, {firstname}::varchar, {middlename}::varchar, {lastname}::varchar, {suffix}, {emailpromotion}::int4, 
{additionalcontactinfo}::xml, {demographics}::xml, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table person_TEMP (like person.person) on commit drop".execute(): @nowarn + streamingInsert(s"""copy person_TEMP("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(PersonRow.text, c): @nowarn + SQL"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + select * from person_TEMP + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table person_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala index ab4d398f22..da4f66c04a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala @@ -88,4 +88,17 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonRow])(implicit c: Connection): List[PersonRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala index e1ef04026b..742f731789 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala @@ -29,4 +29,7 @@ trait PersonphoneRepo { def update: UpdateBuilder[PersonphoneFields, PersonphoneRow] def update(row: PersonphoneRow)(implicit c: Connection): Boolean def upsert(unsaved: PersonphoneRow)(implicit c: Connection): PersonphoneRow + def upsertBatch(unsaved: Iterable[PersonphoneRow])(implicit c: Connection): List[PersonphoneRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersonphoneRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala index c2afac90ff..aec325a325 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.person.phonenumbertype.PhonenumbertypeId import adventureworks.public.Phone +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -138,4 +140,41 @@ class PersonphoneRepoImpl extends PersonphoneRepo { .executeInsert(PersonphoneRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersonphoneRow])(implicit c: Connection): List[PersonphoneRow] = { + def toNamedParameter(row: PersonphoneRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("phonenumber", ParameterValue(row.phonenumber, null, Phone.toStatement)), + NamedParameter("phonenumbertypeid", ParameterValue(row.phonenumbertypeid, null, PhonenumbertypeId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + values ({businessentityid}::int4, {phonenumber}::varchar, {phonenumbertypeid}::int4, {modifieddate}::timestamp) + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning 
"businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersonphoneRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonphoneRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table personphone_TEMP (like person.personphone) on commit drop".execute(): @nowarn + streamingInsert(s"""copy personphone_TEMP("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") from stdin""", batchSize, unsaved)(PersonphoneRow.text, c): @nowarn + SQL"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + select * from personphone_TEMP + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personphone_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala index 92853f8132..ed12d4df39 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala @@ -87,4 +87,17 @@ class PersonphoneRepoMock(toRow: Function1[PersonphoneRowUnsaved, PersonphoneRow map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersonphoneRow])(implicit c: Connection): List[PersonphoneRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersonphoneRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala index 0be2c2c174..1d49582a94 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala @@ -29,4 +29,7 @@ trait PhonenumbertypeRepo { def update: UpdateBuilder[PhonenumbertypeFields, PhonenumbertypeRow] def update(row: PhonenumbertypeRow)(implicit c: Connection): Boolean def upsert(unsaved: PhonenumbertypeRow)(implicit c: Connection): PhonenumbertypeRow + def upsertBatch(unsaved: Iterable[PhonenumbertypeRow])(implicit c: Connection): List[PhonenumbertypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PhonenumbertypeRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala index 065ac42cc4..988e4cbd4e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala @@ -10,6 +10,7 @@ package phonenumbertype import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -131,4 +133,42 @@ class PhonenumbertypeRepoImpl extends PhonenumbertypeRepo { .executeInsert(PhonenumbertypeRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PhonenumbertypeRow])(implicit c: Connection): List[PhonenumbertypeRow] = { + def toNamedParameter(row: PhonenumbertypeRow): List[NamedParameter] = List( + NamedParameter("phonenumbertypeid", ParameterValue(row.phonenumbertypeid, null, PhonenumbertypeId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + values ({phonenumbertypeid}::int4, {name}::varchar, {modifieddate}::timestamp) + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "phonenumbertypeid", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PhonenumbertypeRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PhonenumbertypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table phonenumbertype_TEMP (like person.phonenumbertype) on commit drop".execute(): @nowarn + streamingInsert(s"""copy phonenumbertype_TEMP("phonenumbertypeid", "name", "modifieddate") from stdin""", batchSize, unsaved)(PhonenumbertypeRow.text, c): @nowarn + SQL"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + select * from phonenumbertype_TEMP + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table phonenumbertype_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala index eb0b166c68..3b0853dd1b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala @@ -87,4 +87,17 @@ class PhonenumbertypeRepoMock(toRow: Function1[PhonenumbertypeRowUnsaved, Phonen map.put(unsaved.phonenumbertypeid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PhonenumbertypeRow])(implicit c: Connection): List[PhonenumbertypeRow] = { + unsaved.map { row => + map += (row.phonenumbertypeid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PhonenumbertypeRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.phonenumbertypeid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala index 279c3c91af..b9d836a6ec 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala @@ -29,4 +29,7 @@ trait StateprovinceRepo { def update: UpdateBuilder[StateprovinceFields, StateprovinceRow] def update(row: StateprovinceRow)(implicit c: Connection): Boolean def upsert(unsaved: StateprovinceRow)(implicit c: Connection): StateprovinceRow + def upsertBatch(unsaved: Iterable[StateprovinceRow])(implicit c: Connection): List[StateprovinceRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[StateprovinceRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala index 228f4abf12..ac856af52c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.person.countryregion.CountryregionId import adventureworks.public.Flag import adventureworks.public.Name import adventureworks.sales.salesterritory.SalesterritoryId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -22,6 +23,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -162,4 +164,57 @@ class StateprovinceRepoImpl extends StateprovinceRepo { .executeInsert(StateprovinceRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[StateprovinceRow])(implicit c: Connection): List[StateprovinceRow] = { + def toNamedParameter(row: StateprovinceRow): List[NamedParameter] = List( + NamedParameter("stateprovinceid", ParameterValue(row.stateprovinceid, null, StateprovinceId.toStatement)), + NamedParameter("stateprovincecode", ParameterValue(row.stateprovincecode, null, ToStatement.stringToStatement)), + NamedParameter("countryregioncode", ParameterValue(row.countryregioncode, null, CountryregionId.toStatement)), + NamedParameter("isonlystateprovinceflag", ParameterValue(row.isonlystateprovinceflag, null, Flag.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, SalesterritoryId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + values ({stateprovinceid}::int4, {stateprovincecode}::bpchar, {countryregioncode}, {isonlystateprovinceflag}::bool, {name}::varchar, {territoryid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(StateprovinceRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you 
use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[StateprovinceRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table stateprovince_TEMP (like person.stateprovince) on commit drop".execute(): @nowarn + streamingInsert(s"""copy stateprovince_TEMP("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(StateprovinceRow.text, c): @nowarn + SQL"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + select * from stateprovince_TEMP + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table stateprovince_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala index d7b8f962a1..ad2e4cb12f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala @@ -87,4 +87,17 @@ class StateprovinceRepoMock(toRow: Function1[StateprovinceRowUnsaved, Stateprovi map.put(unsaved.stateprovinceid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[StateprovinceRow])(implicit c: Connection): List[StateprovinceRow] = { + unsaved.map { row => + map += (row.stateprovinceid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[StateprovinceRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.stateprovinceid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala index 560fea6a75..e7d39a783c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala @@ -29,4 +29,7 @@ trait BillofmaterialsRepo { def update: UpdateBuilder[BillofmaterialsFields, BillofmaterialsRow] def update(row: BillofmaterialsRow)(implicit c: Connection): Boolean def upsert(unsaved: BillofmaterialsRow)(implicit c: Connection): BillofmaterialsRow + def upsertBatch(unsaved: Iterable[BillofmaterialsRow])(implicit c: Connection): List[BillofmaterialsRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[BillofmaterialsRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala index a8c84e1f55..7ab3b068cb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.product.ProductId import adventureworks.production.unitmeasure.UnitmeasureId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -164,4 +166,60 @@ class BillofmaterialsRepoImpl extends BillofmaterialsRepo { .executeInsert(BillofmaterialsRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[BillofmaterialsRow])(implicit c: Connection): List[BillofmaterialsRow] = { + def toNamedParameter(row: BillofmaterialsRow): List[NamedParameter] = List( + NamedParameter("billofmaterialsid", ParameterValue(row.billofmaterialsid, null, ToStatement.intToStatement)), + NamedParameter("productassemblyid", ParameterValue(row.productassemblyid, null, ToStatement.optionToStatement(ProductId.toStatement, ProductId.parameterMetadata))), + NamedParameter("componentid", ParameterValue(row.componentid, null, ProductId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("unitmeasurecode", ParameterValue(row.unitmeasurecode, null, UnitmeasureId.toStatement)), + NamedParameter("bomlevel", ParameterValue(row.bomlevel, null, TypoShort.toStatement)), + NamedParameter("perassemblyqty", ParameterValue(row.perassemblyqty, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + values ({billofmaterialsid}::int4, {productassemblyid}::int4, {componentid}::int4, {startdate}::timestamp, {enddate}::timestamp, {unitmeasurecode}::bpchar, {bomlevel}::int2, {perassemblyqty}::numeric, {modifieddate}::timestamp) + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + 
"modifieddate" = EXCLUDED."modifieddate" + returning "billofmaterialsid", "productassemblyid", "componentid", "startdate"::text, "enddate"::text, "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(BillofmaterialsRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BillofmaterialsRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table billofmaterials_TEMP (like production.billofmaterials) on commit drop".execute(): @nowarn + streamingInsert(s"""copy billofmaterials_TEMP("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") from stdin""", batchSize, unsaved)(BillofmaterialsRow.text, c): @nowarn + SQL"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + select * from billofmaterials_TEMP + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table billofmaterials_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala index 397e97f21e..a50cef1754 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala @@ -87,4 +87,17 @@ class BillofmaterialsRepoMock(toRow: Function1[BillofmaterialsRowUnsaved, Billof map.put(unsaved.billofmaterialsid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[BillofmaterialsRow])(implicit c: Connection): List[BillofmaterialsRow] = { + unsaved.map { row => + map += (row.billofmaterialsid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[BillofmaterialsRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.billofmaterialsid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala index f1c7fcff17..cd42c61ca1 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala @@ -29,4 +29,7 @@ trait CultureRepo { def update: UpdateBuilder[CultureFields, CultureRow] def update(row: CultureRow)(implicit c: Connection): Boolean def upsert(unsaved: CultureRow)(implicit c: Connection): CultureRow + def upsertBatch(unsaved: Iterable[CultureRow])(implicit c: Connection): List[CultureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CultureRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala index 168ccdcdc4..d414001c5d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala @@ -10,6 +10,7 @@ package culture import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -128,4 +130,42 @@ class CultureRepoImpl extends CultureRepo { .executeInsert(CultureRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CultureRow])(implicit c: Connection): List[CultureRow] = { + def toNamedParameter(row: CultureRow): List[NamedParameter] = List( + NamedParameter("cultureid", ParameterValue(row.cultureid, null, CultureId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.culture("cultureid", "name", "modifieddate") + values ({cultureid}::bpchar, {name}::varchar, {modifieddate}::timestamp) + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "cultureid", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CultureRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CultureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table culture_TEMP (like production.culture) on commit drop".execute(): @nowarn + streamingInsert(s"""copy culture_TEMP("cultureid", "name", "modifieddate") from stdin""", batchSize, unsaved)(CultureRow.text, c): @nowarn + SQL"""insert into production.culture("cultureid", "name", "modifieddate") + select * from culture_TEMP + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table culture_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala index 79c532d76d..8e36e29cb5 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala @@ -87,4 +87,17 @@ class CultureRepoMock(toRow: Function1[CultureRowUnsaved, CultureRow], map.put(unsaved.cultureid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CultureRow])(implicit c: Connection): List[CultureRow] = { + unsaved.map { row => + map += (row.cultureid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CultureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.cultureid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala index e11b65000f..95154b1b2a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala @@ -31,4 +31,7 @@ trait DocumentRepo { def update: UpdateBuilder[DocumentFields, DocumentRow] def update(row: DocumentRow)(implicit c: Connection): Boolean def upsert(unsaved: DocumentRow)(implicit c: Connection): DocumentRow + def upsertBatch(unsaved: Iterable[DocumentRow])(implicit c: Connection): List[DocumentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[DocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala index 130dfb5a07..3a832c4c61 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Flag +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -23,6 +24,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -193,4 +195,72 @@ class DocumentRepoImpl extends DocumentRepo { .executeInsert(DocumentRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[DocumentRow])(implicit c: Connection): List[DocumentRow] = { + def toNamedParameter(row: DocumentRow): List[NamedParameter] = List( + NamedParameter("title", ParameterValue(row.title, null, ToStatement.stringToStatement)), + NamedParameter("owner", ParameterValue(row.owner, null, BusinessentityId.toStatement)), + NamedParameter("folderflag", ParameterValue(row.folderflag, null, Flag.toStatement)), + NamedParameter("filename", ParameterValue(row.filename, null, ToStatement.stringToStatement)), + NamedParameter("fileextension", ParameterValue(row.fileextension, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("revision", ParameterValue(row.revision, null, ToStatement.stringToStatement)), + NamedParameter("changenumber", ParameterValue(row.changenumber, null, ToStatement.intToStatement)), + NamedParameter("status", ParameterValue(row.status, null, TypoShort.toStatement)), + NamedParameter("documentsummary", ParameterValue(row.documentsummary, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("document", ParameterValue(row.document, null, ToStatement.optionToStatement(TypoBytea.toStatement, TypoBytea.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("documentnode", ParameterValue(row.documentnode, null, DocumentId.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + values ({title}, {owner}::int4, {folderflag}::bool, {filename}, {fileextension}, {revision}::bpchar, {changenumber}::int4, {status}::int2, {documentsummary}, {document}::bytea, {rowguid}::uuid, {modifieddate}::timestamp, {documentnode}) + on conflict ("documentnode") + do update set + 
"title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate"::text, "documentnode" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(DocumentRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[DocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table document_TEMP (like production.document) on commit drop".execute(): @nowarn + streamingInsert(s"""copy document_TEMP("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") from stdin""", batchSize, unsaved)(DocumentRow.text, c): @nowarn + SQL"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + select * from document_TEMP + on conflict ("documentnode") + do update set + "title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table document_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala index 9f4d2ff643..ee6057ff8e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala @@ -91,4 +91,17 @@ class DocumentRepoMock(toRow: Function1[DocumentRowUnsaved, DocumentRow], map.put(unsaved.documentnode, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[DocumentRow])(implicit c: Connection): List[DocumentRow] = { + unsaved.map { row => + map += (row.documentnode -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[DocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.documentnode -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala index 6c0210c763..9f4506d3c9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala @@ -29,4 +29,7 @@ trait IllustrationRepo { def update: UpdateBuilder[IllustrationFields, IllustrationRow] def update(row: IllustrationRow)(implicit c: Connection): Boolean def upsert(unsaved: IllustrationRow)(implicit c: Connection): IllustrationRow + def upsertBatch(unsaved: Iterable[IllustrationRow])(implicit c: Connection): List[IllustrationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[IllustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala index e366c777a5..1808e94d9e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala @@ -10,6 +10,7 @@ package illustration import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoXml +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -132,4 +134,42 @@ class IllustrationRepoImpl extends IllustrationRepo { .executeInsert(IllustrationRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[IllustrationRow])(implicit c: Connection): List[IllustrationRow] = { + def toNamedParameter(row: IllustrationRow): List[NamedParameter] = List( + NamedParameter("illustrationid", ParameterValue(row.illustrationid, null, IllustrationId.toStatement)), + NamedParameter("diagram", ParameterValue(row.diagram, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + values ({illustrationid}::int4, {diagram}::xml, {modifieddate}::timestamp) + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + returning "illustrationid", "diagram", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + 
).executeReturning(IllustrationRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[IllustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table illustration_TEMP (like production.illustration) on commit drop".execute(): @nowarn + streamingInsert(s"""copy illustration_TEMP("illustrationid", "diagram", "modifieddate") from stdin""", batchSize, unsaved)(IllustrationRow.text, c): @nowarn + SQL"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + select * from illustration_TEMP + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table illustration_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala index 3ecbea1fae..6767884d6b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala @@ -87,4 +87,17 @@ class IllustrationRepoMock(toRow: Function1[IllustrationRowUnsaved, Illustration map.put(unsaved.illustrationid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[IllustrationRow])(implicit c: Connection): List[IllustrationRow] = { + unsaved.map { row => + map += (row.illustrationid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[IllustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.illustrationid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala index 5a14375801..0a56204792 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala @@ -29,4 +29,7 @@ trait LocationRepo { def update: UpdateBuilder[LocationFields, LocationRow] def update(row: LocationRow)(implicit c: Connection): Boolean def upsert(unsaved: LocationRow)(implicit c: Connection): LocationRow + def upsertBatch(unsaved: Iterable[LocationRow])(implicit c: Connection): List[LocationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[LocationRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala index 431455ca8d..b5662c8282 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala @@ -10,6 +10,7 @@ package location import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -146,4 +148,48 @@ class LocationRepoImpl extends LocationRepo { .executeInsert(LocationRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[LocationRow])(implicit c: Connection): List[LocationRow] = { + def toNamedParameter(row: LocationRow): List[NamedParameter] = List( + NamedParameter("locationid", ParameterValue(row.locationid, null, LocationId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("costrate", ParameterValue(row.costrate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("availability", ParameterValue(row.availability, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + values ({locationid}::int4, {name}::varchar, {costrate}::numeric, {availability}::numeric, {modifieddate}::timestamp) + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + returning "locationid", "name", "costrate", "availability", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(LocationRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[LocationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table location_TEMP (like production.location) on commit drop".execute(): @nowarn + streamingInsert(s"""copy location_TEMP("locationid", "name", "costrate", "availability", "modifieddate") from stdin""", batchSize, unsaved)(LocationRow.text, c): @nowarn + SQL"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + select * from location_TEMP + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table location_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala index da374be2c9..b0e7d5c87e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala @@ -87,4 +87,17 @@ class LocationRepoMock(toRow: Function1[LocationRowUnsaved, LocationRow], map.put(unsaved.locationid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[LocationRow])(implicit c: Connection): List[LocationRow] = { + unsaved.map { row => + map += (row.locationid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[LocationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.locationid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala index e077e4b2de..ea99bb45f3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala @@ -29,4 +29,7 @@ trait ProductRepo { def update: UpdateBuilder[ProductFields, ProductRow] def update(row: ProductRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductRow)(implicit c: Connection): ProductRow + def upsertBatch(unsaved: Iterable[ProductRow])(implicit c: Connection): List[ProductRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala index 12baefb389..40f07e7eb9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala @@ -16,6 +16,7 @@ import adventureworks.production.productsubcategory.ProductsubcategoryId import adventureworks.production.unitmeasure.UnitmeasureId import adventureworks.public.Flag import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -25,6 +26,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -236,4 +238,108 @@ class ProductRepoImpl extends ProductRepo { .executeInsert(ProductRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductRow])(implicit c: Connection): List[ProductRow] = { + def toNamedParameter(row: ProductRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("productnumber", ParameterValue(row.productnumber, null, ToStatement.stringToStatement)), + NamedParameter("makeflag", ParameterValue(row.makeflag, null, Flag.toStatement)), + NamedParameter("finishedgoodsflag", ParameterValue(row.finishedgoodsflag, null, Flag.toStatement)), + NamedParameter("color", ParameterValue(row.color, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("safetystocklevel", ParameterValue(row.safetystocklevel, null, TypoShort.toStatement)), + NamedParameter("reorderpoint", ParameterValue(row.reorderpoint, null, TypoShort.toStatement)), + NamedParameter("standardcost", ParameterValue(row.standardcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("listprice", ParameterValue(row.listprice, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("size", ParameterValue(row.size, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("sizeunitmeasurecode", ParameterValue(row.sizeunitmeasurecode, null, ToStatement.optionToStatement(UnitmeasureId.toStatement, UnitmeasureId.parameterMetadata))), + NamedParameter("weightunitmeasurecode", ParameterValue(row.weightunitmeasurecode, null, ToStatement.optionToStatement(UnitmeasureId.toStatement, UnitmeasureId.parameterMetadata))), + NamedParameter("weight", ParameterValue(row.weight, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("daystomanufacture", ParameterValue(row.daystomanufacture, null, ToStatement.intToStatement)), + NamedParameter("productline", ParameterValue(row.productline, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("class", 
ParameterValue(row.`class`, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("style", ParameterValue(row.style, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("productsubcategoryid", ParameterValue(row.productsubcategoryid, null, ToStatement.optionToStatement(ProductsubcategoryId.toStatement, ProductsubcategoryId.parameterMetadata))), + NamedParameter("productmodelid", ParameterValue(row.productmodelid, null, ToStatement.optionToStatement(ProductmodelId.toStatement, ProductmodelId.parameterMetadata))), + NamedParameter("sellstartdate", ParameterValue(row.sellstartdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("sellenddate", ParameterValue(row.sellenddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("discontinueddate", ParameterValue(row.discontinueddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") + values ({productid}::int4, {name}::varchar, {productnumber}, {makeflag}::bool, {finishedgoodsflag}::bool, {color}, {safetystocklevel}::int2, {reorderpoint}::int2, {standardcost}::numeric, {listprice}::numeric, {size}, {sizeunitmeasurecode}::bpchar, {weightunitmeasurecode}::bpchar, {weight}::numeric, {daystomanufacture}::int4, {productline}::bpchar, {class}::bpchar, {style}::bpchar, {productsubcategoryid}::int4, {productmodelid}::int4, {sellstartdate}::timestamp, {sellenddate}::timestamp, {discontinueddate}::timestamp, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", 
"name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate"::text, "sellenddate"::text, "discontinueddate"::text, "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table product_TEMP (like production.product) on commit drop".execute(): @nowarn + streamingInsert(s"""copy product_TEMP("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductRow.text, c): @nowarn + SQL"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") + select * from product_TEMP + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table product_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala index dd88783d48..b2c06c5cb3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala @@ -87,4 +87,17 @@ class ProductRepoMock(toRow: Function1[ProductRowUnsaved, ProductRow], map.put(unsaved.productid, unsaved): @nowarn unsaved } + override def 
upsertBatch(unsaved: Iterable[ProductRow])(implicit c: Connection): List[ProductRow] = { + unsaved.map { row => + map += (row.productid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala index 3715886c04..b9ec06ec32 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala @@ -29,4 +29,7 @@ trait ProductcategoryRepo { def update: UpdateBuilder[ProductcategoryFields, ProductcategoryRow] def update(row: ProductcategoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductcategoryRow)(implicit c: Connection): ProductcategoryRow + def upsertBatch(unsaved: Iterable[ProductcategoryRow])(implicit c: Connection): List[ProductcategoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala index 95c88ef909..8a05b1550c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -139,4 +141,45 @@ class ProductcategoryRepoImpl extends ProductcategoryRepo { .executeInsert(ProductcategoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductcategoryRow])(implicit c: Connection): List[ProductcategoryRow] = { + def toNamedParameter(row: ProductcategoryRow): List[NamedParameter] = List( + NamedParameter("productcategoryid", ParameterValue(row.productcategoryid, null, ProductcategoryId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productcategory("productcategoryid", "name", 
"rowguid", "modifieddate") + values ({productcategoryid}::int4, {name}::varchar, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productcategoryid", "name", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductcategoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productcategory_TEMP (like production.productcategory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productcategory_TEMP("productcategoryid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductcategoryRow.text, c): @nowarn + SQL"""insert into production.productcategory("productcategoryid", "name", "rowguid", "modifieddate") + select * from productcategory_TEMP + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcategory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala index 98bf69181f..6b343b445b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductcategoryRepoMock(toRow: Function1[ProductcategoryRowUnsaved, Produc map.put(unsaved.productcategoryid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductcategoryRow])(implicit c: Connection): List[ProductcategoryRow] = { + unsaved.map { row => + map += (row.productcategoryid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productcategoryid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala index b1a1d6a0f7..96d1b517e5 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala @@ -29,4 +29,7 @@ trait ProductcosthistoryRepo { def update: UpdateBuilder[ProductcosthistoryFields, ProductcosthistoryRow] def update(row: ProductcosthistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductcosthistoryRow)(implicit c: Connection): ProductcosthistoryRow + def upsertBatch(unsaved: Iterable[ProductcosthistoryRow])(implicit c: Connection): List[ProductcosthistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductcosthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala index 3d06d52ced..29759da239 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala @@ -10,6 +10,7 @@ package productcosthistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,46 @@ class ProductcosthistoryRepoImpl extends ProductcosthistoryRepo { .executeInsert(ProductcosthistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductcosthistoryRow])(implicit c: Connection): List[ProductcosthistoryRow] = { + def toNamedParameter(row: ProductcosthistoryRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("standardcost", ParameterValue(row.standardcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + 
s"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + values ({productid}::int4, {startdate}::timestamp, {enddate}::timestamp, {standardcost}::numeric, {modifieddate}::timestamp) + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "startdate"::text, "enddate"::text, "standardcost", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductcosthistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductcosthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productcosthistory_TEMP (like production.productcosthistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productcosthistory_TEMP("productid", "startdate", "enddate", "standardcost", "modifieddate") from stdin""", batchSize, unsaved)(ProductcosthistoryRow.text, c): @nowarn + SQL"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + select * from productcosthistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcosthistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala index 4f92133d1a..36a348be4f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductcosthistoryRepoMock(toRow: Function1[ProductcosthistoryRowUnsaved, map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductcosthistoryRow])(implicit c: Connection): List[ProductcosthistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductcosthistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala index 56c66e1700..9cb988c8b0 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala @@ -29,4 +29,7 @@ trait ProductdescriptionRepo { def update: UpdateBuilder[ProductdescriptionFields, ProductdescriptionRow] def update(row: ProductdescriptionRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductdescriptionRow)(implicit c: Connection): ProductdescriptionRow + def upsertBatch(unsaved: Iterable[ProductdescriptionRow])(implicit c: Connection): List[ProductdescriptionRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductdescriptionRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala index 42190ac7ac..a4b595c15e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala @@ -10,6 +10,7 @@ package productdescription import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -139,4 +141,45 @@ class ProductdescriptionRepoImpl extends ProductdescriptionRepo { .executeInsert(ProductdescriptionRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductdescriptionRow])(implicit c: Connection): List[ProductdescriptionRow] = { + def toNamedParameter(row: ProductdescriptionRow): List[NamedParameter] = List( + NamedParameter("productdescriptionid", ParameterValue(row.productdescriptionid, null, ProductdescriptionId.toStatement)), + NamedParameter("description", ParameterValue(row.description, null, ToStatement.stringToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + values ({productdescriptionid}::int4, 
{description}, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productdescriptionid", "description", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductdescriptionRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductdescriptionRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productdescription_TEMP (like production.productdescription) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productdescription_TEMP("productdescriptionid", "description", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductdescriptionRow.text, c): @nowarn + SQL"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + select * from productdescription_TEMP + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdescription_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala index 80c10400cf..cc7cd1ad0e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala @@ -87,4 +87,17 @@ class ProductdescriptionRepoMock(toRow: Function1[ProductdescriptionRowUnsaved, map.put(unsaved.productdescriptionid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductdescriptionRow])(implicit c: Connection): List[ProductdescriptionRow] = { + unsaved.map { row => + map += (row.productdescriptionid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductdescriptionRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productdescriptionid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala index 2a78e52772..198e291d89 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala @@ -29,4 +29,7 @@ trait ProductdocumentRepo { def update: UpdateBuilder[ProductdocumentFields, ProductdocumentRow] def update(row: ProductdocumentRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductdocumentRow)(implicit c: Connection): ProductdocumentRow + def upsertBatch(unsaved: Iterable[ProductdocumentRow])(implicit c: Connection): List[ProductdocumentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductdocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala index 28a794ad1e..f3c228a8aa 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.document.DocumentId import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -136,4 +138,40 @@ class ProductdocumentRepoImpl extends ProductdocumentRepo { .executeInsert(ProductdocumentRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductdocumentRow])(implicit c: Connection): List[ProductdocumentRow] = { + def toNamedParameter(row: ProductdocumentRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("documentnode", ParameterValue(row.documentnode, null, DocumentId.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productdocument("productid", "modifieddate", "documentnode") + values ({productid}::int4, {modifieddate}::timestamp, {documentnode}) + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "modifieddate"::text, "documentnode" + """, + 
toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductdocumentRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductdocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productdocument_TEMP (like production.productdocument) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productdocument_TEMP("productid", "modifieddate", "documentnode") from stdin""", batchSize, unsaved)(ProductdocumentRow.text, c): @nowarn + SQL"""insert into production.productdocument("productid", "modifieddate", "documentnode") + select * from productdocument_TEMP + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdocument_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala index d7b8cd12bf..08229c907d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala @@ -87,4 +87,17 @@ class ProductdocumentRepoMock(toRow: Function1[ProductdocumentRowUnsaved, Produc map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductdocumentRow])(implicit c: Connection): List[ProductdocumentRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductdocumentRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala index ed3a87eb90..32b1602db4 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala @@ -29,4 +29,7 @@ trait ProductinventoryRepo { def update: UpdateBuilder[ProductinventoryFields, ProductinventoryRow] def update(row: ProductinventoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductinventoryRow)(implicit c: Connection): ProductinventoryRow + def upsertBatch(unsaved: Iterable[ProductinventoryRow])(implicit c: Connection): List[ProductinventoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductinventoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala index fd0dc5d4ef..bc6576b603 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.production.location.LocationId import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -158,4 +160,52 @@ class ProductinventoryRepoImpl extends ProductinventoryRepo { .executeInsert(ProductinventoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductinventoryRow])(implicit c: Connection): List[ProductinventoryRow] = { + def toNamedParameter(row: ProductinventoryRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("locationid", ParameterValue(row.locationid, null, LocationId.toStatement)), + NamedParameter("shelf", ParameterValue(row.shelf, null, ToStatement.stringToStatement)), + NamedParameter("bin", ParameterValue(row.bin, null, TypoShort.toStatement)), + NamedParameter("quantity", ParameterValue(row.quantity, null, TypoShort.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + values ({productid}::int4, {locationid}::int2, {shelf}, {bin}::int2, {quantity}::int2, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductinventoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductinventoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productinventory_TEMP (like production.productinventory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productinventory_TEMP("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductinventoryRow.text, c): @nowarn + SQL"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + select * from productinventory_TEMP + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productinventory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala index 0039022c35..75c940d7af 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductinventoryRepoMock(toRow: Function1[ProductinventoryRowUnsaved, Prod map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductinventoryRow])(implicit c: Connection): List[ProductinventoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductinventoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala index f2af790106..f7d65f998d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala @@ -29,4 +29,7 @@ trait ProductlistpricehistoryRepo { def update: UpdateBuilder[ProductlistpricehistoryFields, ProductlistpricehistoryRow] def update(row: ProductlistpricehistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductlistpricehistoryRow)(implicit c: Connection): ProductlistpricehistoryRow + def upsertBatch(unsaved: Iterable[ProductlistpricehistoryRow])(implicit c: Connection): List[ProductlistpricehistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductlistpricehistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala index 22559619f4..0ae88601e3 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala @@ -10,6 +10,7 @@ package productlistpricehistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,46 @@ class ProductlistpricehistoryRepoImpl extends ProductlistpricehistoryRepo { .executeInsert(ProductlistpricehistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductlistpricehistoryRow])(implicit c: Connection): List[ProductlistpricehistoryRow] = { + def toNamedParameter(row: ProductlistpricehistoryRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("listprice", ParameterValue(row.listprice, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + values ({productid}::int4, {startdate}::timestamp, {enddate}::timestamp, {listprice}::numeric, {modifieddate}::timestamp) + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "startdate"::text, "enddate"::text, "listprice", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductlistpricehistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductlistpricehistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productlistpricehistory_TEMP (like production.productlistpricehistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productlistpricehistory_TEMP("productid", "startdate", "enddate", "listprice", "modifieddate") from stdin""", batchSize, unsaved)(ProductlistpricehistoryRow.text, c): @nowarn + SQL"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + select * from productlistpricehistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productlistpricehistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala index 36b13e37ad..4e6472f29e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductlistpricehistoryRepoMock(toRow: Function1[ProductlistpricehistoryRo map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductlistpricehistoryRow])(implicit c: Connection): List[ProductlistpricehistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductlistpricehistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala index 091ac06c9d..97e574513d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala @@ -29,4 +29,7 @@ trait ProductmodelRepo { def update: UpdateBuilder[ProductmodelFields, ProductmodelRow] def update(row: ProductmodelRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductmodelRow)(implicit c: Connection): ProductmodelRow + def upsertBatch(unsaved: Iterable[ProductmodelRow])(implicit c: Connection): List[ProductmodelRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductmodelRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala index 965bdf1e3c..951891e456 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.customtypes.TypoXml import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -149,4 +151,51 @@ class ProductmodelRepoImpl extends ProductmodelRepo { .executeInsert(ProductmodelRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductmodelRow])(implicit c: Connection): List[ProductmodelRow] = { + def toNamedParameter(row: ProductmodelRow): List[NamedParameter] = List( + NamedParameter("productmodelid", ParameterValue(row.productmodelid, null, ProductmodelId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("catalogdescription", ParameterValue(row.catalogdescription, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("instructions", ParameterValue(row.instructions, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + values ({productmodelid}::int4, {name}::varchar, {catalogdescription}::xml, {instructions}::xml, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductmodelRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productmodel_TEMP (like production.productmodel) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productmodel_TEMP("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelRow.text, c): @nowarn + SQL"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + select * from productmodel_TEMP + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodel_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala index 138aaf1bdd..2ac224d6a5 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala @@ -87,4 +87,17 @@ class ProductmodelRepoMock(toRow: Function1[ProductmodelRowUnsaved, Productmodel map.put(unsaved.productmodelid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductmodelRow])(implicit c: Connection): List[ProductmodelRow] = { + unsaved.map { row => + map += (row.productmodelid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productmodelid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala index 31b7f19698..b889f26726 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala @@ -29,4 +29,7 @@ trait ProductmodelillustrationRepo { def update: UpdateBuilder[ProductmodelillustrationFields, ProductmodelillustrationRow] def update(row: ProductmodelillustrationRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductmodelillustrationRow)(implicit c: Connection): ProductmodelillustrationRow + def upsertBatch(unsaved: Iterable[ProductmodelillustrationRow])(implicit c: Connection): List[ProductmodelillustrationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductmodelillustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala index 41df2e3e8a..294caa715c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.illustration.IllustrationId import adventureworks.production.productmodel.ProductmodelId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -133,4 +135,40 @@ class ProductmodelillustrationRepoImpl extends ProductmodelillustrationRepo { .executeInsert(ProductmodelillustrationRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductmodelillustrationRow])(implicit c: Connection): List[ProductmodelillustrationRow] = { + def toNamedParameter(row: ProductmodelillustrationRow): List[NamedParameter] = List( + NamedParameter("productmodelid", ParameterValue(row.productmodelid, null, ProductmodelId.toStatement)), + NamedParameter("illustrationid", ParameterValue(row.illustrationid, null, IllustrationId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + values ({productmodelid}::int4, {illustrationid}::int4, {modifieddate}::timestamp) + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "illustrationid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductmodelillustrationRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelillustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productmodelillustration_TEMP (like production.productmodelillustration) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productmodelillustration_TEMP("productmodelid", "illustrationid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelillustrationRow.text, c): @nowarn + SQL"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + select * from productmodelillustration_TEMP + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelillustration_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala index 903ada02a8..ae362a4e96 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala @@ -87,4 +87,17 @@ class ProductmodelillustrationRepoMock(toRow: Function1[Productmodelillustration map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductmodelillustrationRow])(implicit c: Connection): List[ProductmodelillustrationRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelillustrationRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala index cbc87422c9..a6e0f537f4 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala @@ -29,4 +29,7 @@ trait ProductmodelproductdescriptioncultureRepo { def update: UpdateBuilder[ProductmodelproductdescriptioncultureFields, ProductmodelproductdescriptioncultureRow] def update(row: ProductmodelproductdescriptioncultureRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductmodelproductdescriptioncultureRow)(implicit c: Connection): ProductmodelproductdescriptioncultureRow + def upsertBatch(unsaved: Iterable[ProductmodelproductdescriptioncultureRow])(implicit c: Connection): List[ProductmodelproductdescriptioncultureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala index 366b82cff4..ec82f9f96b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.culture.CultureId import adventureworks.production.productdescription.ProductdescriptionId import adventureworks.production.productmodel.ProductmodelId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -138,4 +140,41 @@ class ProductmodelproductdescriptioncultureRepoImpl extends Productmodelproductd .executeInsert(ProductmodelproductdescriptioncultureRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductmodelproductdescriptioncultureRow])(implicit c: Connection): List[ProductmodelproductdescriptioncultureRow] = { + def toNamedParameter(row: ProductmodelproductdescriptioncultureRow): List[NamedParameter] = List( + NamedParameter("productmodelid", ParameterValue(row.productmodelid, null, ProductmodelId.toStatement)), + NamedParameter("productdescriptionid", ParameterValue(row.productdescriptionid, null, ProductdescriptionId.toStatement)), + NamedParameter("cultureid", ParameterValue(row.cultureid, null, CultureId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + values ({productmodelid}::int4, {productdescriptionid}::int4, {cultureid}::bpchar, {modifieddate}::timestamp) + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "productdescriptionid", "cultureid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductmodelproductdescriptioncultureRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productmodelproductdescriptionculture_TEMP (like production.productmodelproductdescriptionculture) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productmodelproductdescriptionculture_TEMP("productmodelid", "productdescriptionid", "cultureid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelproductdescriptioncultureRow.text, c): @nowarn + SQL"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + select * from productmodelproductdescriptionculture_TEMP + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelproductdescriptionculture_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala index be236f9531..ecb8b82456 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala @@ -87,4 +87,17 @@ class ProductmodelproductdescriptioncultureRepoMock(toRow: Function1[Productmode map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductmodelproductdescriptioncultureRow])(implicit c: Connection): List[ProductmodelproductdescriptioncultureRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala index e73a0c3124..4b88246741 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala @@ -29,4 +29,7 @@ trait ProductphotoRepo { def update: UpdateBuilder[ProductphotoFields, ProductphotoRow] def update(row: ProductphotoRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductphotoRow)(implicit c: Connection): ProductphotoRow + def upsertBatch(unsaved: Iterable[ProductphotoRow])(implicit c: Connection): List[ProductphotoRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala index c67fe329fb..6ec60ea74f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala @@ -10,6 +10,7 @@ package productphoto import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoBytea import adventureworks.customtypes.TypoLocalDateTime +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -145,4 +147,51 @@ class ProductphotoRepoImpl extends ProductphotoRepo { .executeInsert(ProductphotoRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductphotoRow])(implicit c: Connection): List[ProductphotoRow] = { + def toNamedParameter(row: ProductphotoRow): List[NamedParameter] = List( + NamedParameter("productphotoid", ParameterValue(row.productphotoid, null, ProductphotoId.toStatement)), + NamedParameter("thumbnailphoto", ParameterValue(row.thumbnailphoto, null, ToStatement.optionToStatement(TypoBytea.toStatement, TypoBytea.parameterMetadata))), + NamedParameter("thumbnailphotofilename", ParameterValue(row.thumbnailphotofilename, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("largephoto", ParameterValue(row.largephoto, null, ToStatement.optionToStatement(TypoBytea.toStatement, TypoBytea.parameterMetadata))), + NamedParameter("largephotofilename", ParameterValue(row.largephotofilename, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + values ({productphotoid}::int4, {thumbnailphoto}::bytea, {thumbnailphotofilename}, {largephoto}::bytea, {largephotofilename}, {modifieddate}::timestamp) + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + returning "productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductphotoRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productphoto_TEMP (like production.productphoto) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productphoto_TEMP("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") from stdin""", batchSize, unsaved)(ProductphotoRow.text, c): @nowarn + SQL"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + select * from productphoto_TEMP + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productphoto_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala index fa62988679..56b5cbfecb 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala @@ -87,4 +87,17 @@ class ProductphotoRepoMock(toRow: Function1[ProductphotoRowUnsaved, Productphoto map.put(unsaved.productphotoid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductphotoRow])(implicit c: Connection): List[ProductphotoRow] = { + unsaved.map { row => + map += (row.productphotoid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productphotoid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala index 669286788f..cd172131d4 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala @@ -29,4 +29,7 @@ trait ProductproductphotoRepo { def update: UpdateBuilder[ProductproductphotoFields, ProductproductphotoRow] def update(row: ProductproductphotoRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductproductphotoRow)(implicit c: Connection): ProductproductphotoRow + def upsertBatch(unsaved: Iterable[ProductproductphotoRow])(implicit c: Connection): List[ProductproductphotoRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
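`streamingInsert` itself is part of typo's runtime and does not appear in this excerpt; judging from the `copy ... from stdin` statement it is handed, it presumably drives PostgreSQL's COPY protocol. A stand-alone sketch of that mechanism using the PostgreSQL JDBC driver's CopyManager (the helper name and the buffer-everything strategy are illustrative only, not typo's actual implementation):

import java.io.StringReader
import java.sql.Connection
import org.postgresql.PGConnection

object CopyIntoTempTable {
  // `lines` must already be in COPY text format: one row per element, columns tab-separated.
  // A real implementation would flush in chunks of `batchSize` instead of building one big string.
  def copyAll(c: Connection, lines: Iterator[String]): Long = {
    val copyApi = c.unwrap(classOf[PGConnection]).getCopyAPI
    val payload = lines.map(_ + "\n").mkString
    copyApi.copyIn(
      """copy productphoto_TEMP("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") from stdin""",
      new StringReader(payload)
    )
  }
}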
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductproductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala index c790f676ac..8812d246a8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId import adventureworks.production.productphoto.ProductphotoId import adventureworks.public.Flag +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,43 @@ class ProductproductphotoRepoImpl extends ProductproductphotoRepo { .executeInsert(ProductproductphotoRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductproductphotoRow])(implicit c: Connection): List[ProductproductphotoRow] = { + def toNamedParameter(row: ProductproductphotoRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("productphotoid", ParameterValue(row.productphotoid, null, ProductphotoId.toStatement)), + NamedParameter("primary", ParameterValue(row.primary, null, Flag.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + values ({productid}::int4, {productphotoid}::int4, {primary}::bool, {modifieddate}::timestamp) + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "productphotoid", "primary", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductproductphotoRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductproductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productproductphoto_TEMP (like production.productproductphoto) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productproductphoto_TEMP("productid", "productphotoid", "primary", "modifieddate") from stdin""", batchSize, unsaved)(ProductproductphotoRow.text, c): @nowarn + SQL"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + select * from productproductphoto_TEMP + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productproductphoto_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala index 71f288b1e1..8c274815d7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala @@ -87,4 +87,17 @@ class ProductproductphotoRepoMock(toRow: Function1[ProductproductphotoRowUnsaved map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductproductphotoRow])(implicit c: Connection): List[ProductproductphotoRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductproductphotoRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala index 1dce4ece3c..16c0c68147 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala @@ -29,4 +29,7 @@ trait ProductreviewRepo { def update: UpdateBuilder[ProductreviewFields, ProductreviewRow] def update(row: ProductreviewRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductreviewRow)(implicit c: Connection): ProductreviewRow + def upsertBatch(unsaved: Iterable[ProductreviewRow])(implicit c: Connection): List[ProductreviewRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductreviewRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala index da515e31d5..5256b0e305 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -157,4 +159,57 @@ class ProductreviewRepoImpl extends ProductreviewRepo { .executeInsert(ProductreviewRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductreviewRow])(implicit c: Connection): List[ProductreviewRow] = { + def toNamedParameter(row: ProductreviewRow): List[NamedParameter] = List( + NamedParameter("productreviewid", ParameterValue(row.productreviewid, null, ProductreviewId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("reviewername", ParameterValue(row.reviewername, null, Name.toStatement)), + NamedParameter("reviewdate", ParameterValue(row.reviewdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("emailaddress", ParameterValue(row.emailaddress, null, ToStatement.stringToStatement)), + NamedParameter("rating", ParameterValue(row.rating, null, ToStatement.intToStatement)), + NamedParameter("comments", ParameterValue(row.comments, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + values ({productreviewid}::int4, {productid}::int4, {reviewername}::varchar, {reviewdate}::timestamp, {emailaddress}, {rating}::int4, {comments}, {modifieddate}::timestamp) + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + returning "productreviewid", "productid", "reviewername", "reviewdate"::text, "emailaddress", "rating", "comments", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductreviewRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductreviewRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productreview_TEMP (like production.productreview) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productreview_TEMP("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") from stdin""", batchSize, unsaved)(ProductreviewRow.text, c): @nowarn + SQL"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + select * from productreview_TEMP + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productreview_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala index 1bc884d950..f65ed62d30 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala @@ -87,4 +87,17 @@ class ProductreviewRepoMock(toRow: Function1[ProductreviewRowUnsaved, Productrev map.put(unsaved.productreviewid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductreviewRow])(implicit c: Connection): List[ProductreviewRow] = { + unsaved.map { row => + map += (row.productreviewid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductreviewRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productreviewid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala index 14ea904267..41fcf62943 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala @@ -29,4 +29,7 @@ trait ProductsubcategoryRepo { def update: UpdateBuilder[ProductsubcategoryFields, ProductsubcategoryRow] def update(row: ProductsubcategoryRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductsubcategoryRow)(implicit c: Connection): ProductsubcategoryRow + def upsertBatch(unsaved: Iterable[ProductsubcategoryRow])(implicit c: Connection): List[ProductsubcategoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
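In contrast to upsertStreaming, upsertBatch is one SQL statement executed as a JDBC batch, so the auto-commit caveat does not apply to it, and the `returning` clause hands back every row as it ended up in the table. A usage sketch (building the ProductreviewRow values is out of scope here; `reviews` is assumed to exist):

import java.sql.Connection
import adventureworks.production.productreview.{ProductreviewRepoImpl, ProductreviewRow}

object UpsertBatchExample {
  def refreshReviews(reviews: Iterable[ProductreviewRow])(implicit c: Connection): List[ProductreviewRow] = {
    val repo = new ProductreviewRepoImpl
    // each returned row reflects the stored state, whether it was freshly inserted
    // or rewritten by the `do update set` branch
    repo.upsertBatch(reviews)
  }
}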
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductsubcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala index cade4b72d9..b2c54983da 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.production.productcategory.ProductcategoryId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -144,4 +146,48 @@ class ProductsubcategoryRepoImpl extends ProductsubcategoryRepo { .executeInsert(ProductsubcategoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductsubcategoryRow])(implicit c: Connection): List[ProductsubcategoryRow] = { + def toNamedParameter(row: ProductsubcategoryRow): List[NamedParameter] = List( + NamedParameter("productsubcategoryid", ParameterValue(row.productsubcategoryid, null, ProductsubcategoryId.toStatement)), + NamedParameter("productcategoryid", ParameterValue(row.productcategoryid, null, ProductcategoryId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + values ({productsubcategoryid}::int4, {productcategoryid}::int4, {name}::varchar, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductsubcategoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductsubcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productsubcategory_TEMP (like production.productsubcategory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productsubcategory_TEMP("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductsubcategoryRow.text, c): @nowarn + SQL"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + select * from productsubcategory_TEMP + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productsubcategory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala index 2336212e1b..62510fdabc 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala @@ -87,4 +87,17 @@ class ProductsubcategoryRepoMock(toRow: Function1[ProductsubcategoryRowUnsaved, map.put(unsaved.productsubcategoryid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductsubcategoryRow])(implicit c: Connection): List[ProductsubcategoryRow] = { + unsaved.map { row => + map += (row.productsubcategoryid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductsubcategoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.productsubcategoryid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala index 011a077cd4..7ebfefd6c6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala @@ -29,4 +29,7 @@ trait ScrapreasonRepo { def update: UpdateBuilder[ScrapreasonFields, ScrapreasonRow] def update(row: ScrapreasonRow)(implicit c: Connection): Boolean def upsert(unsaved: ScrapreasonRow)(implicit c: Connection): ScrapreasonRow + def upsertBatch(unsaved: Iterable[ScrapreasonRow])(implicit c: Connection): List[ScrapreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
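A PostgreSQL-level caveat for the batch variant: `insert ... on conflict do update` fails if the same key appears twice in one statement ("command cannot affect row a second time"), so input that may contain duplicate ids is worth de-duplicating first. A sketch where the last occurrence wins (the helper name is made up):

import java.sql.Connection
import adventureworks.production.productsubcategory.{ProductsubcategoryRepoImpl, ProductsubcategoryRow}

object DedupThenUpsert {
  def upsertDistinct(rows: Iterable[ProductsubcategoryRow])(implicit c: Connection): List[ProductsubcategoryRow] = {
    // keep only the last row seen per primary key before sending the batch
    val deduped = rows.groupMapReduce(_.productsubcategoryid)(identity)((_, last) => last).values
    (new ProductsubcategoryRepoImpl).upsertBatch(deduped)
  }
}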
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ScrapreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala index e19d32c764..41dfd1c30a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala @@ -10,6 +10,7 @@ package scrapreason import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -131,4 +133,42 @@ class ScrapreasonRepoImpl extends ScrapreasonRepo { .executeInsert(ScrapreasonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ScrapreasonRow])(implicit c: Connection): List[ScrapreasonRow] = { + def toNamedParameter(row: ScrapreasonRow): List[NamedParameter] = List( + NamedParameter("scrapreasonid", ParameterValue(row.scrapreasonid, null, ScrapreasonId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + values ({scrapreasonid}::int4, {name}::varchar, {modifieddate}::timestamp) + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "scrapreasonid", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ScrapreasonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ScrapreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table scrapreason_TEMP (like production.scrapreason) on commit drop".execute(): @nowarn + streamingInsert(s"""copy scrapreason_TEMP("scrapreasonid", "name", "modifieddate") from stdin""", batchSize, unsaved)(ScrapreasonRow.text, c): @nowarn + SQL"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + select * from scrapreason_TEMP + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table scrapreason_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala index 5d161f69cf..107aa26c22 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala @@ -87,4 +87,17 @@ class ScrapreasonRepoMock(toRow: Function1[ScrapreasonRowUnsaved, ScrapreasonRow map.put(unsaved.scrapreasonid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ScrapreasonRow])(implicit c: Connection): List[ScrapreasonRow] = { + unsaved.map { row => + map += (row.scrapreasonid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ScrapreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.scrapreasonid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala index 0a61c5d64e..0dbc93d544 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala @@ -29,4 +29,7 @@ trait TransactionhistoryRepo { def update: UpdateBuilder[TransactionhistoryFields, TransactionhistoryRow] def update(row: TransactionhistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: TransactionhistoryRow)(implicit c: Connection): TransactionhistoryRow + def upsertBatch(unsaved: Iterable[TransactionhistoryRow])(implicit c: Connection): List[TransactionhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
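Every upsertBatch implementation goes through `new anorm.adventureworks.ExecuteReturningSyntax.Ops(...).executeReturning(...)`, a small syntax extension defined elsewhere in this patch, presumably because anorm's BatchSql has no built-in way to read back rows from a `returning` clause. Only the shape below can be inferred from the call sites; the body is deliberately elided:

package anorm.adventureworks

import anorm.{BatchSql, ResultSetParser}
import java.sql.Connection

// Shape inferred from the generated call sites; placing it under the `anorm` package
// presumably lets it reach plumbing that anorm does not expose publicly.
object ExecuteReturningSyntax {
  final class Ops(batchSql: BatchSql) {
    def executeReturning[T](parser: ResultSetParser[T])(implicit c: Connection): T = ???
  }
}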
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[TransactionhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala index 48f473c4ed..3562022969 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala @@ -10,6 +10,7 @@ package transactionhistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -162,4 +164,60 @@ class TransactionhistoryRepoImpl extends TransactionhistoryRepo { .executeInsert(TransactionhistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[TransactionhistoryRow])(implicit c: Connection): List[TransactionhistoryRow] = { + def toNamedParameter(row: TransactionhistoryRow): List[NamedParameter] = List( + NamedParameter("transactionid", ParameterValue(row.transactionid, null, TransactionhistoryId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("referenceorderid", ParameterValue(row.referenceorderid, null, ToStatement.intToStatement)), + NamedParameter("referenceorderlineid", ParameterValue(row.referenceorderlineid, null, ToStatement.intToStatement)), + NamedParameter("transactiondate", ParameterValue(row.transactiondate, null, TypoLocalDateTime.toStatement)), + NamedParameter("transactiontype", ParameterValue(row.transactiontype, null, ToStatement.stringToStatement)), + NamedParameter("quantity", ParameterValue(row.quantity, null, ToStatement.intToStatement)), + NamedParameter("actualcost", ParameterValue(row.actualcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + values ({transactionid}::int4, {productid}::int4, {referenceorderid}::int4, {referenceorderlineid}::int4, {transactiondate}::timestamp, {transactiontype}::bpchar, {quantity}::int4, {actualcost}::numeric, {modifieddate}::timestamp) + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + 
returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(TransactionhistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[TransactionhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table transactionhistory_TEMP (like production.transactionhistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy transactionhistory_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(TransactionhistoryRow.text, c): @nowarn + SQL"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistory_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala index d3a7048ab5..6eab7339a0 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala @@ -87,4 +87,17 @@ class TransactionhistoryRepoMock(toRow: Function1[TransactionhistoryRowUnsaved, map.put(unsaved.transactionid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[TransactionhistoryRow])(implicit c: Connection): List[TransactionhistoryRow] = { + unsaved.map { row => + map += (row.transactionid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[TransactionhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.transactionid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala index 6e0b145c2b..8d30c0a9b5 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala @@ -29,4 +29,7 @@ trait TransactionhistoryarchiveRepo { def update: UpdateBuilder[TransactionhistoryarchiveFields, TransactionhistoryarchiveRow] def update(row: TransactionhistoryarchiveRow)(implicit c: Connection): Boolean def upsert(unsaved: TransactionhistoryarchiveRow)(implicit c: Connection): TransactionhistoryarchiveRow + def upsertBatch(unsaved: Iterable[TransactionhistoryarchiveRow])(implicit c: Connection): List[TransactionhistoryarchiveRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[TransactionhistoryarchiveRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala index d3ebb6a4a5..2f601f16ce 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala @@ -9,6 +9,7 @@ package transactionhistoryarchive import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -158,4 +160,60 @@ class TransactionhistoryarchiveRepoImpl extends TransactionhistoryarchiveRepo { .executeInsert(TransactionhistoryarchiveRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[TransactionhistoryarchiveRow])(implicit c: Connection): List[TransactionhistoryarchiveRow] = { + def toNamedParameter(row: TransactionhistoryarchiveRow): List[NamedParameter] = List( + NamedParameter("transactionid", ParameterValue(row.transactionid, null, TransactionhistoryarchiveId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ToStatement.intToStatement)), + NamedParameter("referenceorderid", ParameterValue(row.referenceorderid, null, ToStatement.intToStatement)), + NamedParameter("referenceorderlineid", ParameterValue(row.referenceorderlineid, null, ToStatement.intToStatement)), + NamedParameter("transactiondate", ParameterValue(row.transactiondate, null, 
TypoLocalDateTime.toStatement)), + NamedParameter("transactiontype", ParameterValue(row.transactiontype, null, ToStatement.stringToStatement)), + NamedParameter("quantity", ParameterValue(row.quantity, null, ToStatement.intToStatement)), + NamedParameter("actualcost", ParameterValue(row.actualcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + values ({transactionid}::int4, {productid}::int4, {referenceorderid}::int4, {referenceorderlineid}::int4, {transactiondate}::timestamp, {transactiontype}::bpchar, {quantity}::int4, {actualcost}::numeric, {modifieddate}::timestamp) + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(TransactionhistoryarchiveRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[TransactionhistoryarchiveRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table transactionhistoryarchive_TEMP (like production.transactionhistoryarchive) on commit drop".execute(): @nowarn + streamingInsert(s"""copy transactionhistoryarchive_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(TransactionhistoryarchiveRow.text, c): @nowarn + SQL"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistoryarchive_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistoryarchive_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala index 9c972c3b84..b4b9ad1d8e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala @@ -87,4 +87,17 @@ class TransactionhistoryarchiveRepoMock(toRow: Function1[Transactionhistoryarchi map.put(unsaved.transactionid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[TransactionhistoryarchiveRow])(implicit c: Connection): List[TransactionhistoryarchiveRow] = { + unsaved.map { row => + map += (row.transactionid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[TransactionhistoryarchiveRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.transactionid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala index fbf624a03d..ca017dc087 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala @@ -29,4 +29,7 @@ trait UnitmeasureRepo { def update: UpdateBuilder[UnitmeasureFields, UnitmeasureRow] def update(row: UnitmeasureRow)(implicit c: Connection): Boolean def upsert(unsaved: UnitmeasureRow)(implicit c: Connection): UnitmeasureRow + def upsertBatch(unsaved: Iterable[UnitmeasureRow])(implicit c: Connection): List[UnitmeasureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[UnitmeasureRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala index 3afc1c48c6..bb09e5832a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala @@ -10,6 +10,7 @@ package unitmeasure import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -128,4 +130,42 @@ class UnitmeasureRepoImpl extends UnitmeasureRepo { .executeInsert(UnitmeasureRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[UnitmeasureRow])(implicit c: Connection): List[UnitmeasureRow] = { + def toNamedParameter(row: UnitmeasureRow): List[NamedParameter] = List( + NamedParameter("unitmeasurecode", ParameterValue(row.unitmeasurecode, null, UnitmeasureId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + values ({unitmeasurecode}::bpchar, {name}::varchar, {modifieddate}::timestamp) + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "unitmeasurecode", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(UnitmeasureRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[UnitmeasureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table unitmeasure_TEMP (like production.unitmeasure) on commit drop".execute(): @nowarn + streamingInsert(s"""copy unitmeasure_TEMP("unitmeasurecode", "name", "modifieddate") from stdin""", batchSize, unsaved)(UnitmeasureRow.text, c): @nowarn + SQL"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + select * from unitmeasure_TEMP + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table unitmeasure_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala index 5248989617..95af9d14cd 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala @@ -87,4 +87,17 @@ class UnitmeasureRepoMock(toRow: Function1[UnitmeasureRowUnsaved, UnitmeasureRow map.put(unsaved.unitmeasurecode, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[UnitmeasureRow])(implicit c: Connection): List[UnitmeasureRow] = { + unsaved.map { row => + map += (row.unitmeasurecode -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[UnitmeasureRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.unitmeasurecode -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala index 08cb693e5c..c2f4828596 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala @@ -29,4 +29,7 @@ trait WorkorderRepo { def update: UpdateBuilder[WorkorderFields, WorkorderRow] def update(row: WorkorderRow)(implicit c: Connection): Boolean def upsert(unsaved: WorkorderRow)(implicit c: Connection): WorkorderRow + def upsertBatch(unsaved: Iterable[WorkorderRow])(implicit c: Connection): List[WorkorderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
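One detail to be aware of in the mock implementations of upsertStreaming: `unsaved` is an Iterator, and a Scala iterator can be traversed only once, so after the `foreach` the trailing `unsaved.size` evaluates to 0 rather than the number of rows stored. A self-contained demonstration, with a counting alternative:

object IteratorSizeAfterForeach {
  def main(args: Array[String]): Unit = {
    val it = Iterator(1, 2, 3)
    it.foreach(_ => ())   // traversal exhausts the iterator
    println(it.size)      // prints 0, not 3

    // count while consuming instead
    val counted = Iterator(1, 2, 3).foldLeft(0)((n, _) => n + 1)
    println(counted)      // prints 3
  }
}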
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[WorkorderRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala index 23741d0ea7..ae22a06d01 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.product.ProductId import adventureworks.production.scrapreason.ScrapreasonId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -158,4 +160,60 @@ class WorkorderRepoImpl extends WorkorderRepo { .executeInsert(WorkorderRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[WorkorderRow])(implicit c: Connection): List[WorkorderRow] = { + def toNamedParameter(row: WorkorderRow): List[NamedParameter] = List( + NamedParameter("workorderid", ParameterValue(row.workorderid, null, WorkorderId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("orderqty", ParameterValue(row.orderqty, null, ToStatement.intToStatement)), + NamedParameter("scrappedqty", ParameterValue(row.scrappedqty, null, TypoShort.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("duedate", ParameterValue(row.duedate, null, TypoLocalDateTime.toStatement)), + NamedParameter("scrapreasonid", ParameterValue(row.scrapreasonid, null, ToStatement.optionToStatement(ScrapreasonId.toStatement, ScrapreasonId.parameterMetadata))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + values ({workorderid}::int4, {productid}::int4, {orderqty}::int4, {scrappedqty}::int2, {startdate}::timestamp, {enddate}::timestamp, {duedate}::timestamp, {scrapreasonid}::int2, {modifieddate}::timestamp) + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + returning "workorderid", "productid", "orderqty", "scrappedqty", "startdate"::text, "enddate"::text, "duedate"::text, "scrapreasonid", "modifieddate"::text + """, + toNamedParameter(head), + 
rest.map(toNamedParameter)* + ) + ).executeReturning(WorkorderRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[WorkorderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table workorder_TEMP (like production.workorder) on commit drop".execute(): @nowarn + streamingInsert(s"""copy workorder_TEMP("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") from stdin""", batchSize, unsaved)(WorkorderRow.text, c): @nowarn + SQL"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + select * from workorder_TEMP + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorder_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala index 7e787c8f23..f21b6e49a9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala @@ -87,4 +87,17 @@ class WorkorderRepoMock(toRow: Function1[WorkorderRowUnsaved, WorkorderRow], map.put(unsaved.workorderid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[WorkorderRow])(implicit c: Connection): List[WorkorderRow] = { + unsaved.map { row => + map += (row.workorderid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[WorkorderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.workorderid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala index a7b98aa541..d87cb2cae8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala @@ -29,4 +29,7 @@ trait WorkorderroutingRepo { def update: UpdateBuilder[WorkorderroutingFields, WorkorderroutingRow] def update(row: WorkorderroutingRow)(implicit c: Connection): Boolean def upsert(unsaved: WorkorderroutingRow)(implicit c: Connection): WorkorderroutingRow + def upsertBatch(unsaved: Iterable[WorkorderroutingRow])(implicit c: Connection): List[WorkorderroutingRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
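Taken together, each repo now offers three write paths: `upsert` for a single row, `upsertBatch` for a collection already held in memory, and `upsertStreaming` for very large or unbounded input. The cut-over points in the sketch below are only a rough rule of thumb, not something the patch prescribes:

import java.sql.Connection
import adventureworks.production.workorder.{WorkorderRepo, WorkorderRow}

object ChoosingAnUpsertVariant {
  def save(repo: WorkorderRepo, rows: List[WorkorderRow])(implicit c: Connection): Unit =
    rows match {
      case Nil =>
        ()
      case single :: Nil =>
        repo.upsert(single)
        ()
      case _ if rows.sizeIs <= 10000 =>
        repo.upsertBatch(rows)
        ()
      case _ =>
        // remember to disable auto-commit, as the NOTE above points out
        repo.upsertStreaming(rows.iterator)
        ()
    }
}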
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[WorkorderroutingRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala index 7c3081d795..13dff9a307 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.location.LocationId import adventureworks.production.workorder.WorkorderId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -172,4 +174,65 @@ class WorkorderroutingRepoImpl extends WorkorderroutingRepo { .executeInsert(WorkorderroutingRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[WorkorderroutingRow])(implicit c: Connection): List[WorkorderroutingRow] = { + def toNamedParameter(row: WorkorderroutingRow): List[NamedParameter] = List( + NamedParameter("workorderid", ParameterValue(row.workorderid, null, WorkorderId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ToStatement.intToStatement)), + NamedParameter("operationsequence", ParameterValue(row.operationsequence, null, TypoShort.toStatement)), + NamedParameter("locationid", ParameterValue(row.locationid, null, LocationId.toStatement)), + NamedParameter("scheduledstartdate", ParameterValue(row.scheduledstartdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("scheduledenddate", ParameterValue(row.scheduledenddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("actualstartdate", ParameterValue(row.actualstartdate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("actualenddate", ParameterValue(row.actualenddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("actualresourcehrs", ParameterValue(row.actualresourcehrs, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("plannedcost", ParameterValue(row.plannedcost, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("actualcost", ParameterValue(row.actualcost, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", 
"actualcost", "modifieddate") + values ({workorderid}::int4, {productid}::int4, {operationsequence}::int2, {locationid}::int2, {scheduledstartdate}::timestamp, {scheduledenddate}::timestamp, {actualstartdate}::timestamp, {actualenddate}::timestamp, {actualresourcehrs}::numeric, {plannedcost}::numeric, {actualcost}::numeric, {modifieddate}::timestamp) + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate"::text, "scheduledenddate"::text, "actualstartdate"::text, "actualenddate"::text, "actualresourcehrs", "plannedcost", "actualcost", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(WorkorderroutingRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[WorkorderroutingRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table workorderrouting_TEMP (like production.workorderrouting) on commit drop".execute(): @nowarn + streamingInsert(s"""copy workorderrouting_TEMP("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(WorkorderroutingRow.text, c): @nowarn + SQL"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") + select * from workorderrouting_TEMP + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorderrouting_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala index d302a12ed3..d17a8660d2 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala @@ -87,4 +87,17 @@ class WorkorderroutingRepoMock(toRow: Function1[WorkorderroutingRowUnsaved, Work map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[WorkorderroutingRow])(implicit c: Connection): 
List[WorkorderroutingRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[WorkorderroutingRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala index 609b5fb3e8..caf14d62bd 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala @@ -26,4 +26,7 @@ trait FlaffRepo { def update: UpdateBuilder[FlaffFields, FlaffRow] def update(row: FlaffRow)(implicit c: Connection): Boolean def upsert(unsaved: FlaffRow)(implicit c: Connection): FlaffRow + def upsertBatch(unsaved: Iterable[FlaffRow])(implicit c: Connection): List[FlaffRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[FlaffRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala index 71e7cab75c..281c56705f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala @@ -7,10 +7,13 @@ package adventureworks package public package flaff +import anorm.BatchSql +import anorm.NamedParameter import anorm.ParameterValue import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -103,4 +106,42 @@ class FlaffRepoImpl extends FlaffRepo { .executeInsert(FlaffRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[FlaffRow])(implicit c: Connection): List[FlaffRow] = { + def toNamedParameter(row: FlaffRow): List[NamedParameter] = List( + NamedParameter("code", ParameterValue(row.code, null, ShortText.toStatement)), + NamedParameter("another_code", ParameterValue(row.anotherCode, null, ToStatement.stringToStatement)), + NamedParameter("some_number", ParameterValue(row.someNumber, null, ToStatement.intToStatement)), + NamedParameter("specifier", ParameterValue(row.specifier, null, ShortText.toStatement)), + NamedParameter("parentspecifier", ParameterValue(row.parentspecifier, null, ToStatement.optionToStatement(ShortText.toStatement, ShortText.parameterMetadata))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + values ({code}::text, {another_code}, {some_number}::int4, {specifier}::text, {parentspecifier}::text) + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + returning "code", "another_code", "some_number", "specifier", "parentspecifier" + """, + toNamedParameter(head), + 
rest.map(toNamedParameter)* + ) + ).executeReturning(FlaffRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FlaffRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table flaff_TEMP (like public.flaff) on commit drop".execute(): @nowarn + streamingInsert(s"""copy flaff_TEMP("code", "another_code", "some_number", "specifier", "parentspecifier") from stdin""", batchSize, unsaved)(FlaffRow.text, c): @nowarn + SQL"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + select * from flaff_TEMP + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + ; + drop table flaff_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala index 59eae40c99..58a5d388d2 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala @@ -75,4 +75,17 @@ class FlaffRepoMock(map: scala.collection.mutable.Map[FlaffId, FlaffRow] = scala map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[FlaffRow])(implicit c: Connection): List[FlaffRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[FlaffRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala index f684ba2886..7f7eca706c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala @@ -29,4 +29,7 @@ trait IdentityTestRepo { def update: UpdateBuilder[IdentityTestFields, IdentityTestRow] def update(row: IdentityTestRow)(implicit c: Connection): Boolean def upsert(unsaved: IdentityTestRow)(implicit c: Connection): IdentityTestRow + def upsertBatch(unsaved: Iterable[IdentityTestRow])(implicit c: Connection): List[IdentityTestRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[IdentityTestRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala index 91b4026e5f..9658a7aa24 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala @@ -8,6 +8,7 @@ package public package identity_test import adventureworks.customtypes.Defaulted +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -16,6 +17,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -126,4 +128,42 @@ class IdentityTestRepoImpl extends IdentityTestRepo { .executeInsert(IdentityTestRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[IdentityTestRow])(implicit c: Connection): List[IdentityTestRow] = { + def toNamedParameter(row: IdentityTestRow): List[NamedParameter] = List( + NamedParameter("always_generated", ParameterValue(row.alwaysGenerated, null, ToStatement.intToStatement)), + NamedParameter("default_generated", ParameterValue(row.defaultGenerated, null, ToStatement.intToStatement)), + NamedParameter("name", ParameterValue(row.name, null, IdentityTestId.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into public."identity-test"("always_generated", "default_generated", "name") + values ({always_generated}::int4, {default_generated}::int4, {name}) + on conflict ("name") + do update set + "always_generated" = EXCLUDED."always_generated", + "default_generated" = EXCLUDED."default_generated" + returning "always_generated", "default_generated", "name" + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(IdentityTestRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode!
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[IdentityTestRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"""create temporary table "identity-test_TEMP" (like public."identity-test") on commit drop""".execute(): @nowarn + streamingInsert(s"""copy "identity-test_TEMP"("always_generated", "default_generated", "name") from stdin""", batchSize, unsaved)(IdentityTestRow.text, c): @nowarn + SQL"""insert into public."identity-test"("always_generated", "default_generated", "name") + select * from "identity-test_TEMP" + on conflict ("name") + do update set + "always_generated" = EXCLUDED."always_generated", + "default_generated" = EXCLUDED."default_generated" + ; + drop table "identity-test_TEMP";""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala index b52d7335a4..f360aff446 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala @@ -87,4 +87,17 @@ class IdentityTestRepoMock(toRow: Function1[IdentityTestRowUnsaved, IdentityTest map.put(unsaved.name, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[IdentityTestRow])(implicit c: Connection): List[IdentityTestRow] = { + unsaved.map { row => + map += (row.name -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[IdentityTestRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.name -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala index b91d7dbd7c..9f7963d1af 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala @@ -31,4 +31,7 @@ trait UsersRepo { def update: UpdateBuilder[UsersFields, UsersRow] def update(row: UsersRow)(implicit c: Connection): Boolean def upsert(unsaved: UsersRow)(implicit c: Connection): UsersRow + def upsertBatch(unsaved: Iterable[UsersRow])(implicit c: Connection): List[UsersRow] + /* NOTE: this functionality is not safe if you use auto-commit mode!
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[UsersRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala index 1734741fdb..2339840eae 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala @@ -10,6 +10,7 @@ package users import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoInstant import adventureworks.customtypes.TypoUnknownCitext +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -153,4 +155,54 @@ class UsersRepoImpl extends UsersRepo { .executeInsert(UsersRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[UsersRow])(implicit c: Connection): List[UsersRow] = { + def toNamedParameter(row: UsersRow): List[NamedParameter] = List( + NamedParameter("user_id", ParameterValue(row.userId, null, UsersId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, ToStatement.stringToStatement)), + NamedParameter("last_name", ParameterValue(row.lastName, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("email", ParameterValue(row.email, null, TypoUnknownCitext.toStatement)), + NamedParameter("password", ParameterValue(row.password, null, ToStatement.stringToStatement)), + NamedParameter("created_at", ParameterValue(row.createdAt, null, TypoInstant.toStatement)), + NamedParameter("verified_on", ParameterValue(row.verifiedOn, null, ToStatement.optionToStatement(TypoInstant.toStatement, TypoInstant.parameterMetadata))) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + values ({user_id}::uuid, {name}, {last_name}, {email}::citext, {password}, {created_at}::timestamptz, {verified_on}::timestamptz) + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + returning "user_id", "name", "last_name", "email"::text, "password", "created_at"::text, "verified_on"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(UsersRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
auto-commit mode!
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[UsersRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table users_TEMP (like public.users) on commit drop".execute(): @nowarn + streamingInsert(s"""copy users_TEMP("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") from stdin""", batchSize, unsaved)(UsersRow.text, c): @nowarn + SQL"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + select * from users_TEMP + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + ; + drop table users_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala index 5c36bc2de8..24abf7a4a1 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala @@ -91,4 +91,17 @@ class UsersRepoMock(toRow: Function1[UsersRowUnsaved, UsersRow], map.put(unsaved.userId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[UsersRow])(implicit c: Connection): List[UsersRow] = { + unsaved.map { row => + map += (row.userId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[UsersRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.userId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala index 6be9365233..f485e415e8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala @@ -29,4 +29,7 @@ trait ProductvendorRepo { def update: UpdateBuilder[ProductvendorFields, ProductvendorRow] def update(row: ProductvendorRow)(implicit c: Connection): Boolean def upsert(unsaved: ProductvendorRow)(implicit c: Connection): ProductvendorRow + def upsertBatch(unsaved: Iterable[ProductvendorRow])(implicit c: Connection): List[ProductvendorRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
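+ Note the difference in return types: `upsertBatch` above returns the upserted rows, while `upsertStreaming` only reports a row count. A sketch (assumes a `List[ProductvendorRow]` named `rows` and an implicit Connection with auto-commit disabled):
+   val stored: List[ProductvendorRow] = new ProductvendorRepoImpl().upsertBatch(rows)
+   val affected: Int = new ProductvendorRepoImpl().upsertStreaming(rows.iterator)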
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ProductvendorRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala index 0be92ff9e4..c5588cb269 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.production.product.ProductId import adventureworks.production.unitmeasure.UnitmeasureId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -168,4 +170,64 @@ class ProductvendorRepoImpl extends ProductvendorRepo { .executeInsert(ProductvendorRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ProductvendorRow])(implicit c: Connection): List[ProductvendorRow] = { + def toNamedParameter(row: ProductvendorRow): List[NamedParameter] = List( + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("averageleadtime", ParameterValue(row.averageleadtime, null, ToStatement.intToStatement)), + NamedParameter("standardprice", ParameterValue(row.standardprice, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("lastreceiptcost", ParameterValue(row.lastreceiptcost, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("lastreceiptdate", ParameterValue(row.lastreceiptdate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("minorderqty", ParameterValue(row.minorderqty, null, ToStatement.intToStatement)), + NamedParameter("maxorderqty", ParameterValue(row.maxorderqty, null, ToStatement.intToStatement)), + NamedParameter("onorderqty", ParameterValue(row.onorderqty, null, ToStatement.optionToStatement(ToStatement.intToStatement, ParameterMetaData.IntParameterMetaData))), + NamedParameter("unitmeasurecode", ParameterValue(row.unitmeasurecode, null, UnitmeasureId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + values ({productid}::int4, {businessentityid}::int4, {averageleadtime}::int4, {standardprice}::numeric, {lastreceiptcost}::numeric, {lastreceiptdate}::timestamp, {minorderqty}::int4, {maxorderqty}::int4, {onorderqty}::int4, 
{unitmeasurecode}::bpchar, {modifieddate}::timestamp) + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate"::text, "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ProductvendorRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductvendorRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table productvendor_TEMP (like purchasing.productvendor) on commit drop".execute(): @nowarn + streamingInsert(s"""copy productvendor_TEMP("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") from stdin""", batchSize, unsaved)(ProductvendorRow.text, c): @nowarn + SQL"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + select * from productvendor_TEMP + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productvendor_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala index c478d4ffd0..5d51e3fe35 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala @@ -87,4 +87,17 @@ class ProductvendorRepoMock(toRow: Function1[ProductvendorRowUnsaved, Productven map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ProductvendorRow])(implicit c: Connection): List[ProductvendorRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
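+ In tests this mock applies the same upsert to its in-memory map and, as the implementations here show, never reads the Connection parameter. A sketch (`mockRepo` and `rows` are assumed names; the null Connection is only acceptable because the mock ignores it):
+   implicit val c: java.sql.Connection = null
+   val stored = mockRepo.upsertBatch(rows)
+   mockRepo.upsertStreaming(rows.iterator)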
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ProductvendorRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala index 6eff633cc3..35c18c78cd 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala @@ -29,4 +29,7 @@ trait PurchaseorderheaderRepo { def update: UpdateBuilder[PurchaseorderheaderFields, PurchaseorderheaderRow] def update(row: PurchaseorderheaderRow)(implicit c: Connection): Boolean def upsert(unsaved: PurchaseorderheaderRow)(implicit c: Connection): PurchaseorderheaderRow + def upsertBatch(unsaved: Iterable[PurchaseorderheaderRow])(implicit c: Connection): List[PurchaseorderheaderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PurchaseorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala index 9b71be81cb..53c084b130 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.person.businessentity.BusinessentityId import adventureworks.purchasing.shipmethod.ShipmethodId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -188,4 +190,69 @@ class PurchaseorderheaderRepoImpl extends PurchaseorderheaderRepo { .executeInsert(PurchaseorderheaderRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PurchaseorderheaderRow])(implicit c: Connection): List[PurchaseorderheaderRow] = { + def toNamedParameter(row: PurchaseorderheaderRow): List[NamedParameter] = List( + NamedParameter("purchaseorderid", ParameterValue(row.purchaseorderid, null, PurchaseorderheaderId.toStatement)), + NamedParameter("revisionnumber", ParameterValue(row.revisionnumber, null, TypoShort.toStatement)), + NamedParameter("status", ParameterValue(row.status, null, TypoShort.toStatement)), + NamedParameter("employeeid", ParameterValue(row.employeeid, null, BusinessentityId.toStatement)), + NamedParameter("vendorid", ParameterValue(row.vendorid, null, BusinessentityId.toStatement)), + NamedParameter("shipmethodid", ParameterValue(row.shipmethodid, null, ShipmethodId.toStatement)), + NamedParameter("orderdate", 
ParameterValue(row.orderdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("shipdate", ParameterValue(row.shipdate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("subtotal", ParameterValue(row.subtotal, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("taxamt", ParameterValue(row.taxamt, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("freight", ParameterValue(row.freight, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + values ({purchaseorderid}::int4, {revisionnumber}::int2, {status}::int2, {employeeid}::int4, {vendorid}::int4, {shipmethodid}::int4, {orderdate}::timestamp, {shipdate}::timestamp, {subtotal}::numeric, {taxamt}::numeric, {freight}::numeric, {modifieddate}::timestamp) + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + returning "purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate"::text, "shipdate"::text, "subtotal", "taxamt", "freight", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PurchaseorderheaderRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PurchaseorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table purchaseorderheader_TEMP (like purchasing.purchaseorderheader) on commit drop".execute(): @nowarn + streamingInsert(s"""copy purchaseorderheader_TEMP("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") from stdin""", batchSize, unsaved)(PurchaseorderheaderRow.text, c): @nowarn + SQL"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + select * from purchaseorderheader_TEMP + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table purchaseorderheader_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala index b87ed685b3..f94480732d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala @@ -87,4 +87,17 @@ class PurchaseorderheaderRepoMock(toRow: Function1[PurchaseorderheaderRowUnsaved map.put(unsaved.purchaseorderid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PurchaseorderheaderRow])(implicit c: Connection): List[PurchaseorderheaderRow] = { + unsaved.map { row => + map += (row.purchaseorderid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PurchaseorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.purchaseorderid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala index 2bcc28e36d..a0a41b3e81 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala @@ -29,4 +29,7 @@ trait ShipmethodRepo { def update: UpdateBuilder[ShipmethodFields, ShipmethodRow] def update(row: ShipmethodRow)(implicit c: Connection): Boolean def upsert(unsaved: ShipmethodRow)(implicit c: Connection): ShipmethodRow + def upsertBatch(unsaved: Iterable[ShipmethodRow])(implicit c: Connection): List[ShipmethodRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ShipmethodRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala index 6b41ceec70..baf2f33b8e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -154,4 +156,51 @@ class ShipmethodRepoImpl extends ShipmethodRepo { .executeInsert(ShipmethodRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ShipmethodRow])(implicit c: Connection): List[ShipmethodRow] = { + def toNamedParameter(row: ShipmethodRow): List[NamedParameter] = List( + NamedParameter("shipmethodid", ParameterValue(row.shipmethodid, null, ShipmethodId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("shipbase", ParameterValue(row.shipbase, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("shiprate", ParameterValue(row.shiprate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + values ({shipmethodid}::int4, {name}::varchar, {shipbase}::numeric, {shiprate}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ShipmethodRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShipmethodRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table shipmethod_TEMP (like purchasing.shipmethod) on commit drop".execute(): @nowarn + streamingInsert(s"""copy shipmethod_TEMP("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ShipmethodRow.text, c): @nowarn + SQL"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + select * from shipmethod_TEMP + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shipmethod_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala index e20971743f..0786b7a247 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala @@ -87,4 +87,17 @@ class ShipmethodRepoMock(toRow: Function1[ShipmethodRowUnsaved, ShipmethodRow], map.put(unsaved.shipmethodid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ShipmethodRow])(implicit c: Connection): List[ShipmethodRow] = { + unsaved.map { row => + map += (row.shipmethodid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShipmethodRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.shipmethodid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala index 1c6e1739c5..539bc68bad 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala @@ -30,4 +30,7 @@ trait VendorRepo { def update: UpdateBuilder[VendorFields, VendorRow] def update(row: VendorRow)(implicit c: Connection): Boolean def upsert(unsaved: VendorRow)(implicit c: Connection): VendorRow + def upsertBatch(unsaved: Iterable[VendorRow])(implicit c: Connection): List[VendorRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[VendorRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala index 7808f01971..9e5a53d5ca 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.AccountNumber import adventureworks.public.Flag import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -23,6 +24,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -160,4 +162,57 @@ class VendorRepoImpl extends VendorRepo { .executeInsert(VendorRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[VendorRow])(implicit c: Connection): List[VendorRow] = { + def toNamedParameter(row: VendorRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("accountnumber", ParameterValue(row.accountnumber, null, AccountNumber.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("creditrating", ParameterValue(row.creditrating, null, TypoShort.toStatement)), + NamedParameter("preferredvendorstatus", ParameterValue(row.preferredvendorstatus, null, Flag.toStatement)), + NamedParameter("activeflag", ParameterValue(row.activeflag, null, Flag.toStatement)), + NamedParameter("purchasingwebserviceurl", ParameterValue(row.purchasingwebserviceurl, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + values ({businessentityid}::int4, {accountnumber}::varchar, {name}::varchar, {creditrating}::int2, {preferredvendorstatus}::bool, {activeflag}::bool, {purchasingwebserviceurl}, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(VendorRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use 
auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[VendorRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table vendor_TEMP (like purchasing.vendor) on commit drop".execute(): @nowarn + streamingInsert(s"""copy vendor_TEMP("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") from stdin""", batchSize, unsaved)(VendorRow.text, c): @nowarn + SQL"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + select * from vendor_TEMP + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table vendor_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala index 75443f4d89..efc42e3979 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala @@ -88,4 +88,17 @@ class VendorRepoMock(toRow: Function1[VendorRowUnsaved, VendorRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[VendorRow])(implicit c: Connection): List[VendorRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[VendorRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala index b688c30554..3c7005e552 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala @@ -29,4 +29,7 @@ trait CountryregioncurrencyRepo { def update: UpdateBuilder[CountryregioncurrencyFields, CountryregioncurrencyRow] def update(row: CountryregioncurrencyRow)(implicit c: Connection): Boolean def upsert(unsaved: CountryregioncurrencyRow)(implicit c: Connection): CountryregioncurrencyRow + def upsertBatch(unsaved: Iterable[CountryregioncurrencyRow])(implicit c: Connection): List[CountryregioncurrencyRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CountryregioncurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala index 5d3284f755..87e8c06757 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.countryregion.CountryregionId import adventureworks.sales.currency.CurrencyId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -133,4 +135,40 @@ class CountryregioncurrencyRepoImpl extends CountryregioncurrencyRepo { .executeInsert(CountryregioncurrencyRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CountryregioncurrencyRow])(implicit c: Connection): List[CountryregioncurrencyRow] = { + def toNamedParameter(row: CountryregioncurrencyRow): List[NamedParameter] = List( + NamedParameter("countryregioncode", ParameterValue(row.countryregioncode, null, CountryregionId.toStatement)), + NamedParameter("currencycode", ParameterValue(row.currencycode, null, CurrencyId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + values ({countryregioncode}, {currencycode}::bpchar, {modifieddate}::timestamp) + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "countryregioncode", "currencycode", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CountryregioncurrencyRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CountryregioncurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table countryregioncurrency_TEMP (like sales.countryregioncurrency) on commit drop".execute(): @nowarn + streamingInsert(s"""copy countryregioncurrency_TEMP("countryregioncode", "currencycode", "modifieddate") from stdin""", batchSize, unsaved)(CountryregioncurrencyRow.text, c): @nowarn + SQL"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + select * from countryregioncurrency_TEMP + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregioncurrency_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala index 0c568d54ee..534ddb8c1f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala @@ -87,4 +87,17 @@ class CountryregioncurrencyRepoMock(toRow: Function1[CountryregioncurrencyRowUns map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CountryregioncurrencyRow])(implicit c: Connection): List[CountryregioncurrencyRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CountryregioncurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala index 51ba3e86f1..9264921906 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala @@ -31,4 +31,7 @@ trait CreditcardRepo { def update: UpdateBuilder[CreditcardFields, CreditcardRow] def update(row: CreditcardRow)(implicit c: Connection): Boolean def upsert(unsaved: CreditcardRow)(implicit c: Connection): CreditcardRow + def upsertBatch(unsaved: Iterable[CreditcardRow])(implicit c: Connection): List[CreditcardRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
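+ If the application already wraps repository calls in a transaction boundary, the streaming upsert can simply be invoked inside it. A sketch with a hypothetical `withTransaction` helper that supplies a Connection with auto-commit disabled and commits on success (`creditcardRows` is also an assumed name):
+   withTransaction { implicit c: java.sql.Connection =>
+     new CreditcardRepoImpl().upsertStreaming(creditcardRows.iterator, batchSize = 5000)
+   }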
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala index 123da8791a..279a95d23d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.userdefined.CustomCreditcardId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -145,4 +147,51 @@ class CreditcardRepoImpl extends CreditcardRepo { .executeInsert(CreditcardRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CreditcardRow])(implicit c: Connection): List[CreditcardRow] = { + def toNamedParameter(row: CreditcardRow): List[NamedParameter] = List( + NamedParameter("creditcardid", ParameterValue(row.creditcardid, null, /* user-picked */ CustomCreditcardId.toStatement)), + NamedParameter("cardtype", ParameterValue(row.cardtype, null, ToStatement.stringToStatement)), + NamedParameter("cardnumber", ParameterValue(row.cardnumber, null, ToStatement.stringToStatement)), + NamedParameter("expmonth", ParameterValue(row.expmonth, null, TypoShort.toStatement)), + NamedParameter("expyear", ParameterValue(row.expyear, null, TypoShort.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + values ({creditcardid}::int4, {cardtype}, {cardnumber}, {expmonth}::int2, {expyear}::int2, {modifieddate}::timestamp) + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + returning "creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CreditcardRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table creditcard_TEMP (like sales.creditcard) on commit drop".execute(): @nowarn + streamingInsert(s"""copy creditcard_TEMP("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") from stdin""", batchSize, unsaved)(CreditcardRow.text, c): @nowarn + SQL"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + select * from creditcard_TEMP + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table creditcard_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala index 5ecef2b209..0cfc1066d8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala @@ -89,4 +89,17 @@ class CreditcardRepoMock(toRow: Function1[CreditcardRowUnsaved, CreditcardRow], map.put(unsaved.creditcardid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CreditcardRow])(implicit c: Connection): List[CreditcardRow] = { + unsaved.map { row => + map += (row.creditcardid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.creditcardid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala index 5fe703c795..5b394ac6b7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala @@ -29,4 +29,7 @@ trait CurrencyRepo { def update: UpdateBuilder[CurrencyFields, CurrencyRow] def update(row: CurrencyRow)(implicit c: Connection): Boolean def upsert(unsaved: CurrencyRow)(implicit c: Connection): CurrencyRow + def upsertBatch(unsaved: Iterable[CurrencyRow])(implicit c: Connection): List[CurrencyRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala index 7ba724beec..84193857e8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala @@ -10,6 +10,7 @@ package currency import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -128,4 +130,42 @@ class CurrencyRepoImpl extends CurrencyRepo { .executeInsert(CurrencyRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CurrencyRow])(implicit c: Connection): List[CurrencyRow] = { + def toNamedParameter(row: CurrencyRow): List[NamedParameter] = List( + NamedParameter("currencycode", ParameterValue(row.currencycode, null, CurrencyId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.currency("currencycode", "name", "modifieddate") + values ({currencycode}::bpchar, {name}::varchar, {modifieddate}::timestamp) + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "currencycode", "name", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CurrencyRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table currency_TEMP (like sales.currency) on commit drop".execute(): @nowarn + streamingInsert(s"""copy currency_TEMP("currencycode", "name", "modifieddate") from stdin""", batchSize, unsaved)(CurrencyRow.text, c): @nowarn + SQL"""insert into sales.currency("currencycode", "name", "modifieddate") + select * from currency_TEMP + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currency_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala index 5bf8164da9..f486159d63 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala @@ -87,4 +87,17 @@ class CurrencyRepoMock(toRow: Function1[CurrencyRowUnsaved, CurrencyRow], map.put(unsaved.currencycode, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CurrencyRow])(implicit c: Connection): List[CurrencyRow] = { + unsaved.map { row => + map += (row.currencycode -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CurrencyRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.currencycode -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala index 51c409f6fa..6c55126260 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala @@ -29,4 +29,7 @@ trait CurrencyrateRepo { def update: UpdateBuilder[CurrencyrateFields, CurrencyrateRow] def update(row: CurrencyrateRow)(implicit c: Connection): Boolean def upsert(unsaved: CurrencyrateRow)(implicit c: Connection): CurrencyrateRow + def upsertBatch(unsaved: Iterable[CurrencyrateRow])(implicit c: Connection): List[CurrencyrateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CurrencyrateRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala index 71aab263ff..4fd34febd2 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala @@ -10,6 +10,7 @@ package currencyrate import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.sales.currency.CurrencyId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -148,4 +150,54 @@ class CurrencyrateRepoImpl extends CurrencyrateRepo { .executeInsert(CurrencyrateRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CurrencyrateRow])(implicit c: Connection): List[CurrencyrateRow] = { + def toNamedParameter(row: CurrencyrateRow): List[NamedParameter] = List( + NamedParameter("currencyrateid", ParameterValue(row.currencyrateid, null, CurrencyrateId.toStatement)), + NamedParameter("currencyratedate", ParameterValue(row.currencyratedate, null, TypoLocalDateTime.toStatement)), + NamedParameter("fromcurrencycode", ParameterValue(row.fromcurrencycode, null, CurrencyId.toStatement)), + NamedParameter("tocurrencycode", ParameterValue(row.tocurrencycode, null, CurrencyId.toStatement)), + NamedParameter("averagerate", ParameterValue(row.averagerate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("endofdayrate", ParameterValue(row.endofdayrate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + values ({currencyrateid}::int4, {currencyratedate}::timestamp, {fromcurrencycode}::bpchar, {tocurrencycode}::bpchar, {averagerate}::numeric, {endofdayrate}::numeric, {modifieddate}::timestamp) + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + returning "currencyrateid", "currencyratedate"::text, "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CurrencyrateRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CurrencyrateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table currencyrate_TEMP (like sales.currencyrate) on commit drop".execute(): @nowarn + streamingInsert(s"""copy currencyrate_TEMP("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") from stdin""", batchSize, unsaved)(CurrencyrateRow.text, c): @nowarn + SQL"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + select * from currencyrate_TEMP + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currencyrate_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala index 4f17e13bd7..19c9ef7baa 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala @@ -87,4 +87,17 @@ class CurrencyrateRepoMock(toRow: Function1[CurrencyrateRowUnsaved, Currencyrate map.put(unsaved.currencyrateid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CurrencyrateRow])(implicit c: Connection): List[CurrencyrateRow] = { + unsaved.map { row => + map += (row.currencyrateid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CurrencyrateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.currencyrateid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala index 1c04b7e16d..1b7a03b0e6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala @@ -29,4 +29,7 @@ trait CustomerRepo { def update: UpdateBuilder[CustomerFields, CustomerRow] def update(row: CustomerRow)(implicit c: Connection): Boolean def upsert(unsaved: CustomerRow)(implicit c: Connection): CustomerRow + def upsertBatch(unsaved: Iterable[CustomerRow])(implicit c: Connection): List[CustomerRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[CustomerRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala index 06e88c2a20..f0a524144f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -149,4 +151,51 @@ class CustomerRepoImpl extends CustomerRepo { .executeInsert(CustomerRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[CustomerRow])(implicit c: Connection): List[CustomerRow] = { + def toNamedParameter(row: CustomerRow): List[NamedParameter] = List( + NamedParameter("customerid", ParameterValue(row.customerid, null, CustomerId.toStatement)), + NamedParameter("personid", ParameterValue(row.personid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("storeid", ParameterValue(row.storeid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, ToStatement.optionToStatement(SalesterritoryId.toStatement, SalesterritoryId.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + values ({customerid}::int4, {personid}::int4, {storeid}::int4, {territoryid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(CustomerRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CustomerRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table customer_TEMP (like sales.customer) on commit drop".execute(): @nowarn + streamingInsert(s"""copy customer_TEMP("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(CustomerRow.text, c): @nowarn + SQL"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + select * from customer_TEMP + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table customer_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala index 909601c34e..e50c0773e5 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala @@ -87,4 +87,17 @@ class CustomerRepoMock(toRow: Function1[CustomerRowUnsaved, CustomerRow], map.put(unsaved.customerid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[CustomerRow])(implicit c: Connection): List[CustomerRow] = { + unsaved.map { row => + map += (row.customerid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[CustomerRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.customerid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala index 2748f91ad8..0aeb4a01dd 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala @@ -31,4 +31,7 @@ trait PersoncreditcardRepo { def update: UpdateBuilder[PersoncreditcardFields, PersoncreditcardRow] def update(row: PersoncreditcardRow)(implicit c: Connection): Boolean def upsert(unsaved: PersoncreditcardRow)(implicit c: Connection): PersoncreditcardRow + def upsertBatch(unsaved: Iterable[PersoncreditcardRow])(implicit c: Connection): List[PersoncreditcardRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[PersoncreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala index 4e6597cb39..80244eedba 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.userdefined.CustomCreditcardId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -134,4 +136,40 @@ class PersoncreditcardRepoImpl extends PersoncreditcardRepo { .executeInsert(PersoncreditcardRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[PersoncreditcardRow])(implicit c: Connection): List[PersoncreditcardRow] = { + def toNamedParameter(row: PersoncreditcardRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("creditcardid", ParameterValue(row.creditcardid, null, /* user-picked */ CustomCreditcardId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + values ({businessentityid}::int4, {creditcardid}::int4, {modifieddate}::timestamp) + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "creditcardid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(PersoncreditcardRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersoncreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table personcreditcard_TEMP (like sales.personcreditcard) on commit drop".execute(): @nowarn + streamingInsert(s"""copy personcreditcard_TEMP("businessentityid", "creditcardid", "modifieddate") from stdin""", batchSize, unsaved)(PersoncreditcardRow.text, c): @nowarn + SQL"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + select * from personcreditcard_TEMP + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personcreditcard_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala index 9f2dba0e44..ae1bb5b942 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala @@ -89,4 +89,17 @@ class PersoncreditcardRepoMock(toRow: Function1[PersoncreditcardRowUnsaved, Pers map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[PersoncreditcardRow])(implicit c: Connection): List[PersoncreditcardRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[PersoncreditcardRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala index a6d48fa801..0b1e009ea9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala @@ -29,4 +29,7 @@ trait SalesorderdetailRepo { def update: UpdateBuilder[SalesorderdetailFields, SalesorderdetailRow] def update(row: SalesorderdetailRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesorderdetailRow)(implicit c: Connection): SalesorderdetailRow + def upsertBatch(unsaved: Iterable[SalesorderdetailRow])(implicit c: Connection): List[SalesorderdetailRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesorderdetailRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala index 6f2d7ee79b..200f66212c 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoUUID import adventureworks.production.product.ProductId import adventureworks.sales.salesorderheader.SalesorderheaderId import adventureworks.sales.specialoffer.SpecialofferId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -23,6 +24,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -175,4 +177,61 @@ class SalesorderdetailRepoImpl extends SalesorderdetailRepo { .executeInsert(SalesorderdetailRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesorderdetailRow])(implicit c: Connection): List[SalesorderdetailRow] = { + def toNamedParameter(row: SalesorderdetailRow): List[NamedParameter] = List( + NamedParameter("salesorderid", ParameterValue(row.salesorderid, null, SalesorderheaderId.toStatement)), + NamedParameter("salesorderdetailid", ParameterValue(row.salesorderdetailid, null, ToStatement.intToStatement)), + NamedParameter("carriertrackingnumber", ParameterValue(row.carriertrackingnumber, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("orderqty", ParameterValue(row.orderqty, null, TypoShort.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("specialofferid", ParameterValue(row.specialofferid, null, SpecialofferId.toStatement)), + NamedParameter("unitprice", ParameterValue(row.unitprice, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("unitpricediscount", ParameterValue(row.unitpricediscount, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + values ({salesorderid}::int4, {salesorderdetailid}::int4, {carriertrackingnumber}, {orderqty}::int2, {productid}::int4, {specialofferid}::int4, {unitprice}::numeric, {unitpricediscount}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = 
EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesorderdetailRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderdetailRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesorderdetail_TEMP (like sales.salesorderdetail) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesorderdetail_TEMP("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderdetailRow.text, c): @nowarn + SQL"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + select * from salesorderdetail_TEMP + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderdetail_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala index b2df97004c..53b9aada26 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala @@ -87,4 +87,17 @@ class SalesorderdetailRepoMock(toRow: Function1[SalesorderdetailRowUnsaved, Sale map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesorderdetailRow])(implicit c: Connection): List[SalesorderdetailRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderdetailRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala index f9ee388f14..c5a3c7c102 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala @@ -29,4 +29,7 @@ trait SalesorderheaderRepo { def update: UpdateBuilder[SalesorderheaderFields, SalesorderheaderRow] def update(row: SalesorderheaderRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesorderheaderRow)(implicit c: Connection): SalesorderheaderRow + def upsertBatch(unsaved: Iterable[SalesorderheaderRow])(implicit c: Connection): List[SalesorderheaderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala index 7d63f09f7c..0c3db5ea1e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala @@ -21,6 +21,7 @@ import adventureworks.sales.currencyrate.CurrencyrateId import adventureworks.sales.customer.CustomerId import adventureworks.sales.salesterritory.SalesterritoryId import adventureworks.userdefined.CustomCreditcardId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -30,6 +31,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -256,4 +258,108 @@ class SalesorderheaderRepoImpl extends SalesorderheaderRepo { .executeInsert(SalesorderheaderRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesorderheaderRow])(implicit c: Connection): List[SalesorderheaderRow] = { + def toNamedParameter(row: SalesorderheaderRow): List[NamedParameter] = List( + NamedParameter("salesorderid", ParameterValue(row.salesorderid, null, SalesorderheaderId.toStatement)), + NamedParameter("revisionnumber", ParameterValue(row.revisionnumber, null, TypoShort.toStatement)), + NamedParameter("orderdate", ParameterValue(row.orderdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("duedate", ParameterValue(row.duedate, null, TypoLocalDateTime.toStatement)), + NamedParameter("shipdate", ParameterValue(row.shipdate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("status", ParameterValue(row.status, null, TypoShort.toStatement)), + NamedParameter("onlineorderflag", ParameterValue(row.onlineorderflag, null, Flag.toStatement)), + 
NamedParameter("purchaseordernumber", ParameterValue(row.purchaseordernumber, null, ToStatement.optionToStatement(OrderNumber.toStatement, OrderNumber.parameterMetadata))), + NamedParameter("accountnumber", ParameterValue(row.accountnumber, null, ToStatement.optionToStatement(AccountNumber.toStatement, AccountNumber.parameterMetadata))), + NamedParameter("customerid", ParameterValue(row.customerid, null, CustomerId.toStatement)), + NamedParameter("salespersonid", ParameterValue(row.salespersonid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, ToStatement.optionToStatement(SalesterritoryId.toStatement, SalesterritoryId.parameterMetadata))), + NamedParameter("billtoaddressid", ParameterValue(row.billtoaddressid, null, AddressId.toStatement)), + NamedParameter("shiptoaddressid", ParameterValue(row.shiptoaddressid, null, AddressId.toStatement)), + NamedParameter("shipmethodid", ParameterValue(row.shipmethodid, null, ShipmethodId.toStatement)), + NamedParameter("creditcardid", ParameterValue(row.creditcardid, null, ToStatement.optionToStatement(CustomCreditcardId.toStatement, CustomCreditcardId.parameterMetadata))), + NamedParameter("creditcardapprovalcode", ParameterValue(row.creditcardapprovalcode, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("currencyrateid", ParameterValue(row.currencyrateid, null, ToStatement.optionToStatement(CurrencyrateId.toStatement, CurrencyrateId.parameterMetadata))), + NamedParameter("subtotal", ParameterValue(row.subtotal, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("taxamt", ParameterValue(row.taxamt, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("freight", ParameterValue(row.freight, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("totaldue", ParameterValue(row.totaldue, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("comment", ParameterValue(row.comment, null, ToStatement.optionToStatement(ToStatement.stringToStatement, ParameterMetaData.StringParameterMetaData))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + values ({salesorderid}::int4, {revisionnumber}::int2, {orderdate}::timestamp, {duedate}::timestamp, {shipdate}::timestamp, {status}::int2, {onlineorderflag}::bool, {purchaseordernumber}::varchar, {accountnumber}::varchar, {customerid}::int4, {salespersonid}::int4, {territoryid}::int4, {billtoaddressid}::int4, {shiptoaddressid}::int4, {shipmethodid}::int4, {creditcardid}::int4, {creditcardapprovalcode}, {currencyrateid}::int4, {subtotal}::numeric, {taxamt}::numeric, {freight}::numeric, {totaldue}::numeric, 
{comment}, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + "shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "revisionnumber", "orderdate"::text, "duedate"::text, "shipdate"::text, "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesorderheaderRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesorderheader_TEMP (like sales.salesorderheader) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesorderheader_TEMP("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderheaderRow.text, c): @nowarn + SQL"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + select * from salesorderheader_TEMP + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + 
"shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheader_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala index e72ac1386e..ba7cbbc6a5 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala @@ -87,4 +87,17 @@ class SalesorderheaderRepoMock(toRow: Function1[SalesorderheaderRowUnsaved, Sale map.put(unsaved.salesorderid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesorderheaderRow])(implicit c: Connection): List[SalesorderheaderRow] = { + unsaved.map { row => + map += (row.salesorderid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderheaderRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.salesorderid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala index a180bb4b18..af2bcd495e 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala @@ -29,4 +29,7 @@ trait SalesorderheadersalesreasonRepo { def update: UpdateBuilder[SalesorderheadersalesreasonFields, SalesorderheadersalesreasonRow] def update(row: SalesorderheadersalesreasonRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesorderheadersalesreasonRow)(implicit c: Connection): SalesorderheadersalesreasonRow + def upsertBatch(unsaved: Iterable[SalesorderheadersalesreasonRow])(implicit c: Connection): List[SalesorderheadersalesreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesorderheadersalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala index 31804a9eb4..156b993dac 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.sales.salesorderheader.SalesorderheaderId import adventureworks.sales.salesreason.SalesreasonId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -133,4 +135,40 @@ class SalesorderheadersalesreasonRepoImpl extends SalesorderheadersalesreasonRep .executeInsert(SalesorderheadersalesreasonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesorderheadersalesreasonRow])(implicit c: Connection): List[SalesorderheadersalesreasonRow] = { + def toNamedParameter(row: SalesorderheadersalesreasonRow): List[NamedParameter] = List( + NamedParameter("salesorderid", ParameterValue(row.salesorderid, null, SalesorderheaderId.toStatement)), + NamedParameter("salesreasonid", ParameterValue(row.salesreasonid, null, SalesreasonId.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + values ({salesorderid}::int4, {salesreasonid}::int4, {modifieddate}::timestamp) + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "salesreasonid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesorderheadersalesreasonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderheadersalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesorderheadersalesreason_TEMP (like sales.salesorderheadersalesreason) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesorderheadersalesreason_TEMP("salesorderid", "salesreasonid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderheadersalesreasonRow.text, c): @nowarn + SQL"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + select * from salesorderheadersalesreason_TEMP + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheadersalesreason_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala index 986b889e5a..2e5885ff34 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala @@ -87,4 +87,17 @@ class SalesorderheadersalesreasonRepoMock(toRow: Function1[Salesorderheadersales map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesorderheadersalesreasonRow])(implicit c: Connection): List[SalesorderheadersalesreasonRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesorderheadersalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala index 4b97eb12be..a4a6408d4b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala @@ -30,4 +30,7 @@ trait SalespersonRepo { def update: UpdateBuilder[SalespersonFields, SalespersonRow] def update(row: SalespersonRow)(implicit c: Connection): Boolean def upsert(unsaved: SalespersonRow)(implicit c: Connection): SalespersonRow + def upsertBatch(unsaved: Iterable[SalespersonRow])(implicit c: Connection): List[SalespersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalespersonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala index 449aba6f3e..cd62aa4c33 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -171,4 +173,60 @@ class SalespersonRepoImpl extends SalespersonRepo { .executeInsert(SalespersonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalespersonRow])(implicit c: Connection): List[SalespersonRow] = { + def toNamedParameter(row: SalespersonRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, ToStatement.optionToStatement(SalesterritoryId.toStatement, SalesterritoryId.parameterMetadata))), + NamedParameter("salesquota", ParameterValue(row.salesquota, null, ToStatement.optionToStatement(ToStatement.scalaBigDecimalToStatement, ParameterMetaData.BigDecimalParameterMetaData))), + NamedParameter("bonus", ParameterValue(row.bonus, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("commissionpct", ParameterValue(row.commissionpct, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("salesytd", ParameterValue(row.salesytd, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("saleslastyear", ParameterValue(row.saleslastyear, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + values ({businessentityid}::int4, {territoryid}::int4, {salesquota}::numeric, {bonus}::numeric, {commissionpct}::numeric, {salesytd}::numeric, {saleslastyear}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "territoryid", 
"salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalespersonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalespersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesperson_TEMP (like sales.salesperson) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesperson_TEMP("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalespersonRow.text, c): @nowarn + SQL"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + select * from salesperson_TEMP + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesperson_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala index 0a057283e8..e85ebc3e75 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala @@ -88,4 +88,17 @@ class SalespersonRepoMock(toRow: Function1[SalespersonRowUnsaved, SalespersonRow map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalespersonRow])(implicit c: Connection): List[SalespersonRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalespersonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala index 07ec8d4a06..ed542cb20f 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala @@ -29,4 +29,7 @@ trait SalespersonquotahistoryRepo { def update: UpdateBuilder[SalespersonquotahistoryFields, SalespersonquotahistoryRow] def update(row: SalespersonquotahistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: SalespersonquotahistoryRow)(implicit c: Connection): SalespersonquotahistoryRow + def upsertBatch(unsaved: Iterable[SalespersonquotahistoryRow])(implicit c: Connection): List[SalespersonquotahistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalespersonquotahistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala index 722bcc9fe1..ead9ed0685 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -145,4 +147,46 @@ class SalespersonquotahistoryRepoImpl extends SalespersonquotahistoryRepo { .executeInsert(SalespersonquotahistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalespersonquotahistoryRow])(implicit c: Connection): List[SalespersonquotahistoryRow] = { + def toNamedParameter(row: SalespersonquotahistoryRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("quotadate", ParameterValue(row.quotadate, null, TypoLocalDateTime.toStatement)), + NamedParameter("salesquota", ParameterValue(row.salesquota, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: 
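The `executeReturning` call used by the `upsertBatch` implementations comes from the generated `anorm.adventureworks.ExecuteReturningSyntax` shown near the end of this patch: it fills the `BatchSql` statement with `getGeneratedKeys = true`, calls `executeBatch()`, and parses `getGeneratedKeys` with the row parser. A rough plain-JDBC sketch of that mechanism follows; it assumes (not verified here) that the PostgreSQL driver leaves an explicit `returning` clause intact when generated keys are requested, and the table `demo_upsert` is hypothetical.

import java.sql.{Connection, Statement}

// Sketch only: batch upsert with an explicit RETURNING clause, reading the rows
// back through getGeneratedKeys after executeBatch().
def batchUpsertReturning(c: Connection, rows: List[(Int, String)]): List[(Int, String)] = {
  val sql =
    """insert into demo_upsert(id, name)
      |values (?, ?)
      |on conflict (id) do update set name = EXCLUDED.name
      |returning id, name""".stripMargin
  val ps = c.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)
  try {
    rows.foreach { case (id, name) =>
      ps.setInt(1, id)
      ps.setString(2, name)
      ps.addBatch()
    }
    ps.executeBatch()
    val rs = ps.getGeneratedKeys
    val out = List.newBuilder[(Int, String)]
    while (rs.next()) out += ((rs.getInt(1), rs.getString(2)))
    out.result()
  } finally ps.close()
}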
rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + values ({businessentityid}::int4, {quotadate}::timestamp, {salesquota}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "quotadate"::text, "salesquota", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalespersonquotahistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalespersonquotahistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salespersonquotahistory_TEMP (like sales.salespersonquotahistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salespersonquotahistory_TEMP("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalespersonquotahistoryRow.text, c): @nowarn + SQL"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + select * from salespersonquotahistory_TEMP + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salespersonquotahistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala index a16a8e84b9..a48b8a6cc6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala @@ -87,4 +87,17 @@ class SalespersonquotahistoryRepoMock(toRow: Function1[SalespersonquotahistoryRo map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalespersonquotahistoryRow])(implicit c: Connection): List[SalespersonquotahistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalespersonquotahistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala index 16b7c307ba..9bf894689b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala @@ -29,4 +29,7 @@ trait SalesreasonRepo { def update: UpdateBuilder[SalesreasonFields, SalesreasonRow] def update(row: SalesreasonRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesreasonRow)(implicit c: Connection): SalesreasonRow + def upsertBatch(unsaved: Iterable[SalesreasonRow])(implicit c: Connection): List[SalesreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala index 5cae0a888d..29d790336d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala @@ -10,6 +10,7 @@ package salesreason import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -17,6 +18,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -135,4 +137,45 @@ class SalesreasonRepoImpl extends SalesreasonRepo { .executeInsert(SalesreasonRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesreasonRow])(implicit c: Connection): List[SalesreasonRow] = { + def toNamedParameter(row: SalesreasonRow): List[NamedParameter] = List( + NamedParameter("salesreasonid", ParameterValue(row.salesreasonid, null, SalesreasonId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("reasontype", ParameterValue(row.reasontype, null, Name.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + values ({salesreasonid}::int4, {name}::varchar, {reasontype}::varchar, {modifieddate}::timestamp) + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesreasonid", "name", "reasontype", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) 
+ ).executeReturning(SalesreasonRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesreason_TEMP (like sales.salesreason) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesreason_TEMP("salesreasonid", "name", "reasontype", "modifieddate") from stdin""", batchSize, unsaved)(SalesreasonRow.text, c): @nowarn + SQL"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + select * from salesreason_TEMP + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesreason_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala index a9926ec349..c318e40fa8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala @@ -87,4 +87,17 @@ class SalesreasonRepoMock(toRow: Function1[SalesreasonRowUnsaved, SalesreasonRow map.put(unsaved.salesreasonid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesreasonRow])(implicit c: Connection): List[SalesreasonRow] = { + unsaved.map { row => + map += (row.salesreasonid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesreasonRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.salesreasonid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala index cd72e6ef17..4bbc19adb7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala @@ -29,4 +29,7 @@ trait SalestaxrateRepo { def update: UpdateBuilder[SalestaxrateFields, SalestaxrateRow] def update(row: SalestaxrateRow)(implicit c: Connection): Boolean def upsert(unsaved: SalestaxrateRow)(implicit c: Connection): SalestaxrateRow + def upsertBatch(unsaved: Iterable[SalestaxrateRow])(implicit c: Connection): List[SalestaxrateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
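`streamingInsert` (defined elsewhere in the generated package, not in this hunk) receives the `copy ... from stdin` statement, the batch size, the row iterator and the row's `.text` encoder. The sketch below shows one plausible shape for such a helper on top of the PostgreSQL JDBC copy API; the name `copyIn`, the chunking strategy and the `unwrap` call are assumptions, not the actual implementation.

import java.io.StringReader
import java.sql.Connection
import org.postgresql.PGConnection

// Sketch only: push tab-separated text rows through COPY ... FROM STDIN in chunks
// of `batchSize`, returning the total number of rows copied.
def copyIn(c: Connection, copySql: String, rows: Iterator[String], batchSize: Int): Long = {
  val copyApi = c.unwrap(classOf[PGConnection]).getCopyAPI
  rows.grouped(batchSize).foldLeft(0L) { (total, chunk) =>
    total + copyApi.copyIn(copySql, new StringReader(chunk.mkString("", "\n", "\n")))
  }
}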
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalestaxrateRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala index b965bdfd24..03a0621a99 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.stateprovince.StateprovinceId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -157,4 +159,54 @@ class SalestaxrateRepoImpl extends SalestaxrateRepo { .executeInsert(SalestaxrateRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalestaxrateRow])(implicit c: Connection): List[SalestaxrateRow] = { + def toNamedParameter(row: SalestaxrateRow): List[NamedParameter] = List( + NamedParameter("salestaxrateid", ParameterValue(row.salestaxrateid, null, SalestaxrateId.toStatement)), + NamedParameter("stateprovinceid", ParameterValue(row.stateprovinceid, null, StateprovinceId.toStatement)), + NamedParameter("taxtype", ParameterValue(row.taxtype, null, TypoShort.toStatement)), + NamedParameter("taxrate", ParameterValue(row.taxrate, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + values ({salestaxrateid}::int4, {stateprovinceid}::int4, {taxtype}::int2, {taxrate}::numeric, {name}::varchar, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalestaxrateRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalestaxrateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salestaxrate_TEMP (like sales.salestaxrate) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salestaxrate_TEMP("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalestaxrateRow.text, c): @nowarn + SQL"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + select * from salestaxrate_TEMP + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salestaxrate_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala index 48ef8c5c19..2f0a9fc175 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala @@ -87,4 +87,17 @@ class SalestaxrateRepoMock(toRow: Function1[SalestaxrateRowUnsaved, Salestaxrate map.put(unsaved.salestaxrateid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalestaxrateRow])(implicit c: Connection): List[SalestaxrateRow] = { + unsaved.map { row => + map += (row.salestaxrateid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalestaxrateRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.salestaxrateid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala index eac709b2fd..a589bdd6f8 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala @@ -29,4 +29,7 @@ trait SalesterritoryRepo { def update: UpdateBuilder[SalesterritoryFields, SalesterritoryRow] def update(row: SalesterritoryRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesterritoryRow)(implicit c: Connection): SalesterritoryRow + def upsertBatch(unsaved: Iterable[SalesterritoryRow])(implicit c: Connection): List[SalesterritoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
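A recurring detail in the `returning` lists above: date/time columns are cast to text (for example `"modifieddate"::text`) while other columns are returned untouched. A plausible reading, and it is only an assumption here, is that the generated parsers consume PostgreSQL's text rendering of timestamps and parse it on the Scala side. A self-contained sketch of such a parser:

import java.time.LocalDateTime
import java.time.format.DateTimeFormatterBuilder
import java.time.temporal.ChronoField

// Sketch only: PostgreSQL renders a timestamp as e.g. "2024-07-13 17:12:29.123456",
// with 0 to 6 fractional digits; this formatter accepts all of those variants.
val pgTimestamp =
  new DateTimeFormatterBuilder()
    .appendPattern("yyyy-MM-dd HH:mm:ss")
    .optionalStart()
    .appendFraction(ChronoField.NANO_OF_SECOND, 0, 6, true)
    .optionalEnd()
    .toFormatter

def parsePgTimestamp(s: String): LocalDateTime = LocalDateTime.parse(s, pgTimestamp)

// parsePgTimestamp("2024-07-13 17:12:29.5") // => 2024-07-13T17:12:29.500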
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesterritoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala index 5174fa28bb..05c48271bf 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.countryregion.CountryregionId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -177,4 +179,63 @@ class SalesterritoryRepoImpl extends SalesterritoryRepo { .executeInsert(SalesterritoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesterritoryRow])(implicit c: Connection): List[SalesterritoryRow] = { + def toNamedParameter(row: SalesterritoryRow): List[NamedParameter] = List( + NamedParameter("territoryid", ParameterValue(row.territoryid, null, SalesterritoryId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("countryregioncode", ParameterValue(row.countryregioncode, null, CountryregionId.toStatement)), + NamedParameter("group", ParameterValue(row.group, null, ToStatement.stringToStatement)), + NamedParameter("salesytd", ParameterValue(row.salesytd, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("saleslastyear", ParameterValue(row.saleslastyear, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("costytd", ParameterValue(row.costytd, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("costlastyear", ParameterValue(row.costlastyear, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + values ({territoryid}::int4, {name}::varchar, {countryregioncode}, {group}, {salesytd}::numeric, {saleslastyear}::numeric, {costytd}::numeric, {costlastyear}::numeric, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "territoryid", "name", "countryregioncode", "group", "salesytd", 
"saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesterritoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesterritoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesterritory_TEMP (like sales.salesterritory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesterritory_TEMP("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesterritoryRow.text, c): @nowarn + SQL"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + select * from salesterritory_TEMP + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala index 214dff433a..aa2ee9e26a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala @@ -87,4 +87,17 @@ class SalesterritoryRepoMock(toRow: Function1[SalesterritoryRowUnsaved, Salester map.put(unsaved.territoryid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesterritoryRow])(implicit c: Connection): List[SalesterritoryRow] = { + unsaved.map { row => + map += (row.territoryid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesterritoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.territoryid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala index e1b55d5696..18089225f7 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala @@ -29,4 +29,7 @@ trait SalesterritoryhistoryRepo { def update: UpdateBuilder[SalesterritoryhistoryFields, SalesterritoryhistoryRow] def update(row: SalesterritoryhistoryRow)(implicit c: Connection): Boolean def upsert(unsaved: SalesterritoryhistoryRow)(implicit c: Connection): SalesterritoryhistoryRow + def upsertBatch(unsaved: Iterable[SalesterritoryhistoryRow])(implicit c: Connection): List[SalesterritoryhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SalesterritoryhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala index 5d8edda1cf..74cc47eb6a 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -20,6 +21,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -150,4 +152,47 @@ class SalesterritoryhistoryRepoImpl extends SalesterritoryhistoryRepo { .executeInsert(SalesterritoryhistoryRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SalesterritoryhistoryRow])(implicit c: Connection): List[SalesterritoryhistoryRow] = { + def toNamedParameter(row: SalesterritoryhistoryRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("territoryid", ParameterValue(row.territoryid, null, SalesterritoryId.toStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, ToStatement.optionToStatement(TypoLocalDateTime.toStatement, TypoLocalDateTime.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", 
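Taken together, the two methods added to `SalesterritoryRepo` trade off differently: `upsertBatch` takes an `Iterable` and hands back the stored rows via `returning`, while `upsertStreaming` takes an `Iterator`, goes through `COPY`, and only reports a row count. A small illustrative chooser (the `1000` cut-off is an arbitrary number for the example, not something the generated code prescribes):

import adventureworks.sales.salesterritory.{SalesterritoryRepo, SalesterritoryRow}
import java.sql.Connection

// Illustration only: upsertBatch when the caller wants the stored rows back,
// upsertStreaming when the input is large and a count is enough (and the call
// is already wrapped in a transaction, per the NOTE in the trait).
def syncTerritories(repo: SalesterritoryRepo, incoming: List[SalesterritoryRow])(implicit c: Connection): Int =
  if (incoming.sizeIs <= 1000) repo.upsertBatch(incoming).size
  else repo.upsertStreaming(incoming.iterator, batchSize = 10000)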
ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + values ({businessentityid}::int4, {territoryid}::int4, {startdate}::timestamp, {enddate}::timestamp, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "territoryid", "startdate"::text, "enddate"::text, "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SalesterritoryhistoryRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesterritoryhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table salesterritoryhistory_TEMP (like sales.salesterritoryhistory) on commit drop".execute(): @nowarn + streamingInsert(s"""copy salesterritoryhistory_TEMP("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesterritoryhistoryRow.text, c): @nowarn + SQL"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + select * from salesterritoryhistory_TEMP + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritoryhistory_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala index 4d90d0ac31..3163857d09 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala @@ -87,4 +87,17 @@ class SalesterritoryhistoryRepoMock(toRow: Function1[SalesterritoryhistoryRowUns map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SalesterritoryhistoryRow])(implicit c: Connection): List[SalesterritoryhistoryRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SalesterritoryhistoryRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala index fd54f6963b..521676b4dc 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala @@ -29,4 +29,7 @@ trait ShoppingcartitemRepo { def update: UpdateBuilder[ShoppingcartitemFields, ShoppingcartitemRow] def update(row: ShoppingcartitemRow)(implicit c: Connection): Boolean def upsert(unsaved: ShoppingcartitemRow)(implicit c: Connection): ShoppingcartitemRow + def upsertBatch(unsaved: Iterable[ShoppingcartitemRow])(implicit c: Connection): List[ShoppingcartitemRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[ShoppingcartitemRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala index 43897887f5..45632f36df 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala @@ -10,6 +10,7 @@ package shoppingcartitem import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -18,6 +19,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -150,4 +152,51 @@ class ShoppingcartitemRepoImpl extends ShoppingcartitemRepo { .executeInsert(ShoppingcartitemRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[ShoppingcartitemRow])(implicit c: Connection): List[ShoppingcartitemRow] = { + def toNamedParameter(row: ShoppingcartitemRow): List[NamedParameter] = List( + NamedParameter("shoppingcartitemid", ParameterValue(row.shoppingcartitemid, null, ShoppingcartitemId.toStatement)), + NamedParameter("shoppingcartid", ParameterValue(row.shoppingcartid, null, ToStatement.stringToStatement)), + NamedParameter("quantity", ParameterValue(row.quantity, null, ToStatement.intToStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("datecreated", ParameterValue(row.datecreated, null, TypoLocalDateTime.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into 
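The composite conflict targets above, such as `("businessentityid", "startdate", "territoryid")`, rely on standard `insert ... on conflict ... do update` semantics: re-upserting the same key updates the non-key columns instead of inserting a duplicate row. A tiny anorm-based demonstration against a throwaway two-column table (`demo_pair` is hypothetical):

import anorm.*
import java.sql.Connection

// Sketch only: a second upsert with the same composite key updates `note`
// instead of inserting another row, so the final count stays at 1.
def demoOnConflict()(implicit c: Connection): Unit = {
  val _ = SQL"create temporary table demo_pair(a int, b int, note text, primary key (a, b))".execute()
  def upsert(a: Int, b: Int, note: String): Int =
    SQL"""insert into demo_pair(a, b, note)
          values ($a, $b, $note)
          on conflict (a, b) do update set note = EXCLUDED.note""".executeUpdate()
  val _ = upsert(1, 2, "first")
  val _ = upsert(1, 2, "second")
  val count = SQL"select count(*) from demo_pair".as(SqlParser.scalar[Long].single)
  assert(count == 1L)
}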
sales.shoppingcartitem("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + values ({shoppingcartitemid}::int4, {shoppingcartid}, {quantity}::int4, {productid}::int4, {datecreated}::timestamp, {modifieddate}::timestamp) + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + returning "shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated"::text, "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(ShoppingcartitemRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShoppingcartitemRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table shoppingcartitem_TEMP (like sales.shoppingcartitem) on commit drop".execute(): @nowarn + streamingInsert(s"""copy shoppingcartitem_TEMP("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") from stdin""", batchSize, unsaved)(ShoppingcartitemRow.text, c): @nowarn + SQL"""insert into sales.shoppingcartitem("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + select * from shoppingcartitem_TEMP + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shoppingcartitem_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala index 9c5120481e..6ac6f7838b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala @@ -87,4 +87,17 @@ class ShoppingcartitemRepoMock(toRow: Function1[ShoppingcartitemRowUnsaved, Shop map.put(unsaved.shoppingcartitemid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[ShoppingcartitemRow])(implicit c: Connection): List[ShoppingcartitemRow] = { + unsaved.map { row => + map += (row.shoppingcartitemid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[ShoppingcartitemRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.shoppingcartitemid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala index f3af8fbdca..0f2b28cbed 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala @@ -29,4 +29,7 @@ trait SpecialofferRepo { def update: UpdateBuilder[SpecialofferFields, SpecialofferRow] def update(row: SpecialofferRow)(implicit c: Connection): Boolean def upsert(unsaved: SpecialofferRow)(implicit c: Connection): SpecialofferRow + def upsertBatch(unsaved: Iterable[SpecialofferRow])(implicit c: Connection): List[SpecialofferRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SpecialofferRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala index 08a94e9b46..56a8c0edbe 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala @@ -10,6 +10,7 @@ package specialoffer import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterMetaData import anorm.ParameterValue @@ -19,6 +20,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -174,4 +176,66 @@ class SpecialofferRepoImpl extends SpecialofferRepo { .executeInsert(SpecialofferRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SpecialofferRow])(implicit c: Connection): List[SpecialofferRow] = { + def toNamedParameter(row: SpecialofferRow): List[NamedParameter] = List( + NamedParameter("specialofferid", ParameterValue(row.specialofferid, null, SpecialofferId.toStatement)), + NamedParameter("description", ParameterValue(row.description, null, ToStatement.stringToStatement)), + NamedParameter("discountpct", ParameterValue(row.discountpct, null, ToStatement.scalaBigDecimalToStatement)), + NamedParameter("type", ParameterValue(row.`type`, null, ToStatement.stringToStatement)), + NamedParameter("category", ParameterValue(row.category, null, ToStatement.stringToStatement)), + NamedParameter("startdate", ParameterValue(row.startdate, null, TypoLocalDateTime.toStatement)), + NamedParameter("enddate", ParameterValue(row.enddate, null, TypoLocalDateTime.toStatement)), + NamedParameter("minqty", ParameterValue(row.minqty, null, ToStatement.intToStatement)), + NamedParameter("maxqty", ParameterValue(row.maxqty, null, ToStatement.optionToStatement(ToStatement.intToStatement, 
ParameterMetaData.IntParameterMetaData))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + values ({specialofferid}::int4, {description}, {discountpct}::numeric, {type}, {category}, {startdate}::timestamp, {enddate}::timestamp, {minqty}::int4, {maxqty}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "specialofferid", "description", "discountpct", "type", "category", "startdate"::text, "enddate"::text, "minqty", "maxqty", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SpecialofferRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SpecialofferRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table specialoffer_TEMP (like sales.specialoffer) on commit drop".execute(): @nowarn + streamingInsert(s"""copy specialoffer_TEMP("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SpecialofferRow.text, c): @nowarn + SQL"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + select * from specialoffer_TEMP + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialoffer_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala index 7b2d8749c0..a9ff1c23a9 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala @@ -87,4 +87,17 @@ class SpecialofferRepoMock(toRow: Function1[SpecialofferRowUnsaved, Specialoffer map.put(unsaved.specialofferid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SpecialofferRow])(implicit c: Connection): List[SpecialofferRow] = { + unsaved.map { row => + map += (row.specialofferid -> row) + row + 
}.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SpecialofferRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.specialofferid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala index 3eb44c9bf3..b2ee0e0b37 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala @@ -29,4 +29,7 @@ trait SpecialofferproductRepo { def update: UpdateBuilder[SpecialofferproductFields, SpecialofferproductRow] def update(row: SpecialofferproductRow)(implicit c: Connection): Boolean def upsert(unsaved: SpecialofferproductRow)(implicit c: Connection): SpecialofferproductRow + def upsertBatch(unsaved: Iterable[SpecialofferproductRow])(implicit c: Connection): List[SpecialofferproductRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[SpecialofferproductRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala index 79b327f682..f89f9a814d 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.production.product.ProductId import adventureworks.sales.specialoffer.SpecialofferId +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -19,6 +20,7 @@ import anorm.SQL import anorm.SimpleSql import anorm.SqlStringInterpolation import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -141,4 +143,43 @@ class SpecialofferproductRepoImpl extends SpecialofferproductRepo { .executeInsert(SpecialofferproductRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[SpecialofferproductRow])(implicit c: Connection): List[SpecialofferproductRow] = { + def toNamedParameter(row: SpecialofferproductRow): List[NamedParameter] = List( + NamedParameter("specialofferid", ParameterValue(row.specialofferid, null, SpecialofferId.toStatement)), + NamedParameter("productid", ParameterValue(row.productid, null, ProductId.toStatement)), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", 
"modifieddate") + values ({specialofferid}::int4, {productid}::int4, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "specialofferid", "productid", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(SpecialofferproductRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SpecialofferproductRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table specialofferproduct_TEMP (like sales.specialofferproduct) on commit drop".execute(): @nowarn + streamingInsert(s"""copy specialofferproduct_TEMP("specialofferid", "productid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SpecialofferproductRow.text, c): @nowarn + SQL"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", "modifieddate") + select * from specialofferproduct_TEMP + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialofferproduct_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala index 3b4b30e624..72eb67df5b 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala @@ -87,4 +87,17 @@ class SpecialofferproductRepoMock(toRow: Function1[SpecialofferproductRowUnsaved map.put(unsaved.compositeId, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[SpecialofferproductRow])(implicit c: Connection): List[SpecialofferproductRow] = { + unsaved.map { row => + map += (row.compositeId -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[SpecialofferproductRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.compositeId -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala index b8432187b9..403d3e9786 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala @@ -30,4 +30,7 @@ trait StoreRepo { def update: UpdateBuilder[StoreFields, StoreRow] def update(row: StoreRow)(implicit c: Connection): Boolean def upsert(unsaved: StoreRow)(implicit c: Connection): StoreRow + def upsertBatch(unsaved: Iterable[StoreRow])(implicit c: Connection): List[StoreRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Iterator[StoreRow], batchSize: Int = 10000)(implicit c: Connection): Int } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala index 574da19b89..bfdaf83c54 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import adventureworks.customtypes.TypoXml import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Name +import anorm.BatchSql import anorm.NamedParameter import anorm.ParameterValue import anorm.RowParser @@ -21,6 +22,7 @@ import anorm.SimpleSql import anorm.SqlStringInterpolation import anorm.ToStatement import java.sql.Connection +import scala.annotation.nowarn import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder import typo.dsl.SelectBuilderSql @@ -147,4 +149,51 @@ class StoreRepoImpl extends StoreRepo { .executeInsert(StoreRow.rowParser(1).single) } + override def upsertBatch(unsaved: Iterable[StoreRow])(implicit c: Connection): List[StoreRow] = { + def toNamedParameter(row: StoreRow): List[NamedParameter] = List( + NamedParameter("businessentityid", ParameterValue(row.businessentityid, null, BusinessentityId.toStatement)), + NamedParameter("name", ParameterValue(row.name, null, Name.toStatement)), + NamedParameter("salespersonid", ParameterValue(row.salespersonid, null, ToStatement.optionToStatement(BusinessentityId.toStatement, BusinessentityId.parameterMetadata))), + NamedParameter("demographics", ParameterValue(row.demographics, null, ToStatement.optionToStatement(TypoXml.toStatement, TypoXml.parameterMetadata))), + NamedParameter("rowguid", ParameterValue(row.rowguid, null, TypoUUID.toStatement)), + NamedParameter("modifieddate", ParameterValue(row.modifieddate, null, TypoLocalDateTime.toStatement)) + ) + unsaved.toList match { + case Nil => Nil + case head :: rest => + new anorm.adventureworks.ExecuteReturningSyntax.Ops( + BatchSql( + s"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + values ({businessentityid}::int4, {name}::varchar, {salespersonid}::int4, {demographics}::xml, {rowguid}::uuid, {modifieddate}::timestamp) + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate"::text + """, + toNamedParameter(head), + rest.map(toNamedParameter)* + ) + ).executeReturning(StoreRow.rowParser(1).*) + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
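`StoreRow.text` (like the other generated `.text` encoders handed to `streamingInsert`) has to serialise rows into PostgreSQL's `COPY` text format. The format itself is fixed by PostgreSQL: tab-separated columns, one row per line, `\N` for NULL, backslash escaping. How the generated encoder implements this is not shown in this patch, so the snippet below is only an illustration of the format for an optional column.

// Illustration of COPY text-format conventions only (not the generated encoder):
// NULL is written as \N, columns are tab-separated, and literal tabs, newlines
// and backslashes inside values must be backslash-escaped.
def escape(value: String): String =
  value.flatMap {
    case '\\' => "\\\\"
    case '\t' => "\\t"
    case '\n' => "\\n"
    case '\r' => "\\r"
    case ch   => ch.toString
  }

def encodeColumn(value: Option[String]): String = value.fold("\\N")(escape)

def encodeLine(columns: List[Option[String]]): String = columns.map(encodeColumn).mkString("\t")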
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[StoreRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + SQL"create temporary table store_TEMP (like sales.store) on commit drop".execute(): @nowarn + streamingInsert(s"""copy store_TEMP("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(StoreRow.text, c): @nowarn + SQL"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + select * from store_TEMP + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table store_TEMP;""".executeUpdate() + } } diff --git a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala index 4a992b8b43..545c28a5b6 100644 --- a/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala +++ b/typo-tester-anorm/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala @@ -88,4 +88,17 @@ class StoreRepoMock(toRow: Function1[StoreRowUnsaved, StoreRow], map.put(unsaved.businessentityid, unsaved): @nowarn unsaved } + override def upsertBatch(unsaved: Iterable[StoreRow])(implicit c: Connection): List[StoreRow] = { + unsaved.map { row => + map += (row.businessentityid -> row) + row + }.toList + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Iterator[StoreRow], batchSize: Int = 10000)(implicit c: Connection): Int = { + unsaved.foreach { row => + map += (row.businessentityid -> row) + } + unsaved.size + } } diff --git a/typo-tester-anorm/generated-and-checked-in/anorm/adventureworks/ExecuteReturningSyntax.scala b/typo-tester-anorm/generated-and-checked-in/anorm/adventureworks/ExecuteReturningSyntax.scala new file mode 100644 index 0000000000..a522317272 --- /dev/null +++ b/typo-tester-anorm/generated-and-checked-in/anorm/adventureworks/ExecuteReturningSyntax.scala @@ -0,0 +1,28 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. + */ +package anorm +package adventureworks + +import java.sql.Connection +import resource.managed + +object ExecuteReturningSyntax { + /* add executeReturning to anorm. 
it needs to be inside the package, because everything is hidden */ + implicit class Ops(batchSql: BatchSql) { + def executeReturning[T](parser: ResultSetParser[T])(implicit c: Connection): T = + managed(batchSql.getFilledStatement(c, getGeneratedKeys = true))(using StatementResource, statementClassTag).acquireAndGet { ps => + ps.executeBatch() + Sql + .asTry( + parser, + managed(ps.getGeneratedKeys)(using ResultSetResource, resultSetClassTag), + onFirstRow = false, + ColumnAliaser.empty + ) + .get + } + } +} diff --git a/typo-tester-anorm/src/scala/adventureworks/production/product/RepoTest.scala b/typo-tester-anorm/src/scala/adventureworks/production/product/RepoTest.scala new file mode 100644 index 0000000000..53e94f0687 --- /dev/null +++ b/typo-tester-anorm/src/scala/adventureworks/production/product/RepoTest.scala @@ -0,0 +1,43 @@ +package adventureworks.production.product + +import adventureworks.customtypes.* +import adventureworks.production.unitmeasure.* +import adventureworks.public.Name +import adventureworks.{SnapshotTest, withConnection} +import org.scalatest.Assertion + +import scala.annotation.nowarn + +class RepoTest extends SnapshotTest { + def upsertStreaming(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { implicit c => + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + unitmeasureRepo.upsertStreaming(Iterator(um1, um2)): @nowarn + assert(List(um1, um2) == unitmeasureRepo.selectAll.sortBy(_.name)): @nowarn + val um1a = um1.copy(name = Name("name1a")) + val um2a = um2.copy(name = Name("name2a")) + unitmeasureRepo.upsertStreaming(Iterator(um1a, um2a)): @nowarn + assert(List(um1a, um2a) == unitmeasureRepo.selectAll.sortBy(_.name)) + } + + def upsertBatch(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { implicit c => + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + val initial = unitmeasureRepo.upsertBatch(List(um1, um2)) + assert(List(um1, um2) == initial.sortBy(_.name)): @nowarn + val um1a = um1.copy(name = Name("name1a")) + val um2a = um2.copy(name = Name("name2a")) + val returned = unitmeasureRepo.upsertBatch(List(um1a, um2a)) + assert(List(um1a, um2a) == returned.sortBy(_.name)): @nowarn + val all = unitmeasureRepo.selectAll + assert(List(um1a, um2a) == all.sortBy(_.name)) + } + + test("upsertStreaming in-memory")(upsertStreaming(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now)))) + test("upsertStreaming pg")(upsertStreaming(new UnitmeasureRepoImpl)) + + test("upsertBatch in-memory")(upsertBatch(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now)))) + test("upsertBatch pg")(upsertBatch(new UnitmeasureRepoImpl)) +} diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala index a0ad086c78..69cbb13011 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala @@ -30,4 +30,7 @@ trait DepartmentRepo { def update: UpdateBuilder[DepartmentFields, 
DepartmentRow] def update(row: DepartmentRow): ConnectionIO[Boolean] def upsert(unsaved: DepartmentRow): ConnectionIO[DepartmentRow] + def upsertBatch(unsaved: List[DepartmentRow]): Stream[ConnectionIO, DepartmentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, DepartmentRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala index 1a287e7530..87042d6bbb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala @@ -10,12 +10,14 @@ package department import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -121,4 +123,33 @@ class DepartmentRepoImpl extends DepartmentRepo { returning "departmentid", "name", "groupname", "modifieddate"::text """.query(using DepartmentRow.read).unique } + override def upsertBatch(unsaved: List[DepartmentRow]): Stream[ConnectionIO, DepartmentRow] = { + Update[DepartmentRow]( + s"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + values (?::int4,?::varchar,?::varchar,?::timestamp) + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + returning "departmentid", "name", "groupname", "modifieddate"::text""" + )(using DepartmentRow.write) + .updateManyWithGeneratedKeys[DepartmentRow]("departmentid", "name", "groupname", "modifieddate")(unsaved)(using catsStdInstancesForList, DepartmentRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, DepartmentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table department_TEMP (like humanresources.department) on commit drop".update.run + _ <- new FragmentOps(sql"""copy department_TEMP("departmentid", "name", "groupname", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using DepartmentRow.text) + res <- sql"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + select * from department_TEMP + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table department_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala index 49e4bbf5e5..15d1de65d1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala @@ -105,4 +105,23 @@ class DepartmentRepoMock(toRow: Function1[DepartmentRowUnsaved, DepartmentRow], unsaved } } + override def upsertBatch(unsaved: List[DepartmentRow]): Stream[ConnectionIO, DepartmentRow] = { + Stream.emits { + unsaved.map { row => + map += (row.departmentid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, DepartmentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.departmentid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRow.scala index 179b76cb64..04a7497214 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -62,4 +63,23 @@ object DepartmentRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[DepartmentRow] = new Write[DepartmentRow]( + puts = List((DepartmentId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.departmentid, x.name, x.groupname, x.modifieddate), + unsafeSet = (rs, i, a) => { + DepartmentId.put.unsafeSetNonNullable(rs, i + 0, a.departmentid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Name.put.unsafeSetNonNullable(rs, i + 2, a.groupname) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + DepartmentId.put.unsafeUpdateNonNullable(ps, i + 0, a.departmentid) + 
Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Name.put.unsafeUpdateNonNullable(ps, i + 2, a.groupname) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala index 0a7187f89f..3dd3d9fde4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala @@ -31,4 +31,7 @@ trait EmployeeRepo { def update: UpdateBuilder[EmployeeFields, EmployeeRow] def update(row: EmployeeRow): ConnectionIO[Boolean] def upsert(unsaved: EmployeeRow): ConnectionIO[EmployeeRow] + def upsertBatch(unsaved: List[EmployeeRow]): Stream[ConnectionIO, EmployeeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeeRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala index f92c807578..75b3b70e98 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Flag +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -185,4 +187,55 @@ class EmployeeRepoImpl extends EmployeeRepo { returning "businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate"::text, "maritalstatus", "gender", "hiredate"::text, "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate"::text, "organizationnode" """.query(using EmployeeRow.read).unique } + override def upsertBatch(unsaved: List[EmployeeRow]): Stream[ConnectionIO, EmployeeRow] = { + Update[EmployeeRow]( + s"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + values (?::int4,?,?,?,?::date,?::bpchar,?::bpchar,?::date,?::bool,?::int2,?::int2,?::bool,?::uuid,?::timestamp,?) 
+ on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + returning "businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate"::text, "maritalstatus", "gender", "hiredate"::text, "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate"::text, "organizationnode"""" + )(using EmployeeRow.write) + .updateManyWithGeneratedKeys[EmployeeRow]("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode")(unsaved)(using catsStdInstancesForList, EmployeeRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table employee_TEMP (like humanresources.employee) on commit drop".update.run + _ <- new FragmentOps(sql"""copy employee_TEMP("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") from stdin""").copyIn(unsaved, batchSize)(using EmployeeRow.text) + res <- sql"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + select * from employee_TEMP + on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + ; + drop table employee_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala index 9a044c1ed1..a421b6f15a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala @@ -106,4 +106,23 @@ class EmployeeRepoMock(toRow: Function1[EmployeeRowUnsaved, EmployeeRow], unsaved } } + override def 
upsertBatch(unsaved: List[EmployeeRow]): Stream[ConnectionIO, EmployeeRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRow.scala index 5b20dee8c4..511735f8db 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRow.scala @@ -17,6 +17,7 @@ import adventureworks.public.Flag import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -144,4 +145,56 @@ object EmployeeRow { sb.append(Text.DELIMETER) Text.option(Text.stringInstance).unsafeEncode(row.organizationnode, sb) } + implicit lazy val write: Write[EmployeeRow] = new Write[EmployeeRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable)), + toList = x => List(x.businessentityid, x.nationalidnumber, x.loginid, x.jobtitle, x.birthdate, x.maritalstatus, x.gender, x.hiredate, x.salariedflag, x.vacationhours, x.sickleavehours, x.currentflag, x.rowguid, x.modifieddate, x.organizationnode), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.nationalidnumber) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.loginid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.jobtitle) + TypoLocalDate.put.unsafeSetNonNullable(rs, i + 4, a.birthdate) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.maritalstatus) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 6, a.gender) + TypoLocalDate.put.unsafeSetNonNullable(rs, i + 7, a.hiredate) + Flag.put.unsafeSetNonNullable(rs, i + 8, a.salariedflag) + TypoShort.put.unsafeSetNonNullable(rs, i + 9, a.vacationhours) + TypoShort.put.unsafeSetNonNullable(rs, i + 10, a.sickleavehours) + Flag.put.unsafeSetNonNullable(rs, i + 11, a.currentflag) + TypoUUID.put.unsafeSetNonNullable(rs, i + 12, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 13, a.modifieddate) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 14, a.organizationnode) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + 
Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.nationalidnumber) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.loginid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.jobtitle) + TypoLocalDate.put.unsafeUpdateNonNullable(ps, i + 4, a.birthdate) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.maritalstatus) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.gender) + TypoLocalDate.put.unsafeUpdateNonNullable(ps, i + 7, a.hiredate) + Flag.put.unsafeUpdateNonNullable(ps, i + 8, a.salariedflag) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 9, a.vacationhours) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 10, a.sickleavehours) + Flag.put.unsafeUpdateNonNullable(ps, i + 11, a.currentflag) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 12, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 13, a.modifieddate) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 14, a.organizationnode) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala index 3b99c7ec54..17fb8442be 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala @@ -30,4 +30,7 @@ trait EmployeedepartmenthistoryRepo { def update: UpdateBuilder[EmployeedepartmenthistoryFields, EmployeedepartmenthistoryRow] def update(row: EmployeedepartmenthistoryRow): ConnectionIO[Boolean] def upsert(unsaved: EmployeedepartmenthistoryRow): ConnectionIO[EmployeedepartmenthistoryRow] + def upsertBatch(unsaved: List[EmployeedepartmenthistoryRow]): Stream[ConnectionIO, EmployeedepartmenthistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala index 567b7c1be1..4ad24020e9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala @@ -13,12 +13,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.humanresources.department.DepartmentId import adventureworks.humanresources.shift.ShiftId import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -141,4 +143,31 @@ class EmployeedepartmenthistoryRepoImpl extends EmployeedepartmenthistoryRepo { returning "businessentityid", "departmentid", "shiftid", "startdate"::text, "enddate"::text, "modifieddate"::text """.query(using EmployeedepartmenthistoryRow.read).unique } + override def upsertBatch(unsaved: List[EmployeedepartmenthistoryRow]): Stream[ConnectionIO, EmployeedepartmenthistoryRow] = { + Update[EmployeedepartmenthistoryRow]( + s"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + values (?::int4,?::int2,?::int2,?::date,?::date,?::timestamp) + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "departmentid", "shiftid", "startdate"::text, "enddate"::text, "modifieddate"::text""" + )(using EmployeedepartmenthistoryRow.write) + .updateManyWithGeneratedKeys[EmployeedepartmenthistoryRow]("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate")(unsaved)(using catsStdInstancesForList, EmployeedepartmenthistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table employeedepartmenthistory_TEMP (like humanresources.employeedepartmenthistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy employeedepartmenthistory_TEMP("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using EmployeedepartmenthistoryRow.text) + res <- sql"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + select * from employeedepartmenthistory_TEMP + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeedepartmenthistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala index ae958486dd..a223708a87 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala @@ -105,4 +105,23 @@ class EmployeedepartmenthistoryRepoMock(toRow: Function1[Employeedepartmenthisto unsaved } } + override def upsertBatch(unsaved: List[EmployeedepartmenthistoryRow]): Stream[ConnectionIO, EmployeedepartmenthistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRow.scala index 84bceb6f11..ffd88e5fdb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRow.scala @@ -16,6 +16,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -84,4 +85,29 @@ object EmployeedepartmenthistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[EmployeedepartmenthistoryRow] = new Write[EmployeedepartmenthistoryRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (DepartmentId.put, Nullability.NoNulls), + (ShiftId.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.departmentid, x.shiftid, x.startdate, x.enddate, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + DepartmentId.put.unsafeSetNonNullable(rs, i + 1, a.departmentid) + ShiftId.put.unsafeSetNonNullable(rs, i + 2, a.shiftid) + TypoLocalDate.put.unsafeSetNonNullable(rs, i + 3, a.startdate) + TypoLocalDate.put.unsafeSetNullable(rs, i + 4, a.enddate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + DepartmentId.put.unsafeUpdateNonNullable(ps, i + 1, a.departmentid) + ShiftId.put.unsafeUpdateNonNullable(ps, i + 2, a.shiftid) + TypoLocalDate.put.unsafeUpdateNonNullable(ps, i + 3, a.startdate) + TypoLocalDate.put.unsafeUpdateNullable(ps, i + 4, a.enddate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala index 4da02bcb1e..9d64f2793e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala @@ -30,4 +30,7 @@ trait EmployeepayhistoryRepo { def update: UpdateBuilder[EmployeepayhistoryFields, EmployeepayhistoryRow] def update(row: EmployeepayhistoryRow): ConnectionIO[Boolean] def upsert(unsaved: EmployeepayhistoryRow): ConnectionIO[EmployeepayhistoryRow] + def upsertBatch(unsaved: 
List[EmployeepayhistoryRow]): Stream[ConnectionIO, EmployeepayhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeepayhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala index e4d294c94a..266edb8d1e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -136,4 +138,33 @@ class EmployeepayhistoryRepoImpl extends EmployeepayhistoryRepo { returning "businessentityid", "ratechangedate"::text, "rate", "payfrequency", "modifieddate"::text """.query(using EmployeepayhistoryRow.read).unique } + override def upsertBatch(unsaved: List[EmployeepayhistoryRow]): Stream[ConnectionIO, EmployeepayhistoryRow] = { + Update[EmployeepayhistoryRow]( + s"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + values (?::int4,?::timestamp,?::numeric,?::int2,?::timestamp) + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "ratechangedate"::text, "rate", "payfrequency", "modifieddate"::text""" + )(using EmployeepayhistoryRow.write) + .updateManyWithGeneratedKeys[EmployeepayhistoryRow]("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate")(unsaved)(using catsStdInstancesForList, EmployeepayhistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeepayhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table employeepayhistory_TEMP (like humanresources.employeepayhistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy employeepayhistory_TEMP("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using EmployeepayhistoryRow.text) + res <- sql"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + select * from employeepayhistory_TEMP + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeepayhistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala index b00f33f632..b8ea23b612 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala @@ -105,4 +105,23 @@ class EmployeepayhistoryRepoMock(toRow: Function1[EmployeepayhistoryRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[EmployeepayhistoryRow]): Stream[ConnectionIO, EmployeepayhistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmployeepayhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRow.scala index 9aee5dc5e0..a1513bb79c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -75,4 +76,26 @@ object EmployeepayhistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[EmployeepayhistoryRow] = new Write[EmployeepayhistoryRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.ratechangedate, x.rate, x.payfrequency, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.ratechangedate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.rate) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.payfrequency) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.ratechangedate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.rate) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.payfrequency) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala index 2855a8944b..8d868805c7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala @@ -30,4 +30,7 @@ trait JobcandidateRepo { def update: UpdateBuilder[JobcandidateFields, JobcandidateRow] def update(row: JobcandidateRow): ConnectionIO[Boolean] def upsert(unsaved: JobcandidateRow): ConnectionIO[JobcandidateRow] + def upsertBatch(unsaved: List[JobcandidateRow]): Stream[ConnectionIO, JobcandidateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, JobcandidateRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala index 5284c26e6c..fce1440ca9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoXml import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -122,4 +124,33 @@ class JobcandidateRepoImpl extends JobcandidateRepo { returning "jobcandidateid", "businessentityid", "resume", "modifieddate"::text """.query(using JobcandidateRow.read).unique } + override def upsertBatch(unsaved: List[JobcandidateRow]): Stream[ConnectionIO, JobcandidateRow] = { + Update[JobcandidateRow]( + s"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + values (?::int4,?::int4,?::xml,?::timestamp) + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + returning "jobcandidateid", "businessentityid", "resume", "modifieddate"::text""" + )(using JobcandidateRow.write) + .updateManyWithGeneratedKeys[JobcandidateRow]("jobcandidateid", "businessentityid", "resume", "modifieddate")(unsaved)(using catsStdInstancesForList, JobcandidateRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, JobcandidateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table jobcandidate_TEMP (like humanresources.jobcandidate) on commit drop".update.run + _ <- new FragmentOps(sql"""copy jobcandidate_TEMP("jobcandidateid", "businessentityid", "resume", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using JobcandidateRow.text) + res <- sql"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + select * from jobcandidate_TEMP + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table jobcandidate_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala index 79e5ba231c..4b8aa2132f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala @@ -105,4 +105,23 @@ class JobcandidateRepoMock(toRow: Function1[JobcandidateRowUnsaved, Jobcandidate unsaved } } + override def upsertBatch(unsaved: List[JobcandidateRow]): Stream[ConnectionIO, JobcandidateRow] = { + Stream.emits { + unsaved.map { row => + map += (row.jobcandidateid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, JobcandidateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.jobcandidateid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRow.scala index 7ac24bdf9c..6ad7150ec9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -64,4 +65,23 @@ object JobcandidateRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[JobcandidateRow] = new Write[JobcandidateRow]( + puts = List((JobcandidateId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.jobcandidateid, x.businessentityid, x.resume, x.modifieddate), + unsafeSet = (rs, i, a) => { + JobcandidateId.put.unsafeSetNonNullable(rs, i + 0, a.jobcandidateid) + BusinessentityId.put.unsafeSetNullable(rs, i + 1, a.businessentityid) + TypoXml.put.unsafeSetNullable(rs, i + 2, a.resume) + 
TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + JobcandidateId.put.unsafeUpdateNonNullable(ps, i + 0, a.jobcandidateid) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 1, a.businessentityid) + TypoXml.put.unsafeUpdateNullable(ps, i + 2, a.resume) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala index 36648c9c8c..705fb98065 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala @@ -30,4 +30,7 @@ trait ShiftRepo { def update: UpdateBuilder[ShiftFields, ShiftRow] def update(row: ShiftRow): ConnectionIO[Boolean] def upsert(unsaved: ShiftRow): ConnectionIO[ShiftRow] + def upsertBatch(unsaved: List[ShiftRow]): Stream[ConnectionIO, ShiftRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ShiftRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala index 57e5bfaf92..6ca1d52f90 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoLocalTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -126,4 +128,35 @@ class ShiftRepoImpl extends ShiftRepo { returning "shiftid", "name", "starttime"::text, "endtime"::text, "modifieddate"::text """.query(using ShiftRow.read).unique } + override def upsertBatch(unsaved: List[ShiftRow]): Stream[ConnectionIO, ShiftRow] = { + Update[ShiftRow]( + s"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + values (?::int4,?::varchar,?::time,?::time,?::timestamp) + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + returning "shiftid", "name", "starttime"::text, "endtime"::text, "modifieddate"::text""" + )(using ShiftRow.write) + .updateManyWithGeneratedKeys[ShiftRow]("shiftid", "name", "starttime", "endtime", "modifieddate")(unsaved)(using catsStdInstancesForList, ShiftRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShiftRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table shift_TEMP (like humanresources.shift) on commit drop".update.run + _ <- new FragmentOps(sql"""copy shift_TEMP("shiftid", "name", "starttime", "endtime", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ShiftRow.text) + res <- sql"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + select * from shift_TEMP + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shift_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala index 7fdca63f1f..e12fb39c8c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala @@ -105,4 +105,23 @@ class ShiftRepoMock(toRow: Function1[ShiftRowUnsaved, ShiftRow], unsaved } } + override def upsertBatch(unsaved: List[ShiftRow]): Stream[ConnectionIO, ShiftRow] = { + Stream.emits { + unsaved.map { row => + map += (row.shiftid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShiftRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.shiftid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRow.scala index 62f47349bd..f3fd65c89f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -69,4 +70,26 @@ object ShiftRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ShiftRow] = new Write[ShiftRow]( + puts = List((ShiftId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalTime.put, Nullability.NoNulls), + (TypoLocalTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.shiftid, x.name, x.starttime, x.endtime, x.modifieddate), + unsafeSet = (rs, i, a) => { + ShiftId.put.unsafeSetNonNullable(rs, i + 0, a.shiftid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalTime.put.unsafeSetNonNullable(rs, i + 2, a.starttime) + TypoLocalTime.put.unsafeSetNonNullable(rs, i + 3, a.endtime) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ShiftId.put.unsafeUpdateNonNullable(ps, i + 0, a.shiftid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, 
a.name) + TypoLocalTime.put.unsafeUpdateNonNullable(ps, i + 2, a.starttime) + TypoLocalTime.put.unsafeUpdateNonNullable(ps, i + 3, a.endtime) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala new file mode 100644 index 0000000000..09a489e761 --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. + */ +package adventureworks +package information_schema + +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import doobie.util.meta.Meta +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.cardinal_number` + * Constraint: CHECK ((VALUE >= 0)) + */ +case class CardinalNumber(value: Int) +object CardinalNumber { + implicit lazy val arrayGet: Get[Array[CardinalNumber]] = adventureworks.IntegerArrayMeta.get.map(_.map(CardinalNumber.apply)) + implicit lazy val arrayPut: Put[Array[CardinalNumber]] = adventureworks.IntegerArrayMeta.put.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CardinalNumber, Int] = Bijection[CardinalNumber, Int](_.value)(CardinalNumber.apply) + implicit lazy val decoder: Decoder[CardinalNumber] = Decoder.decodeInt.map(CardinalNumber.apply) + implicit lazy val encoder: Encoder[CardinalNumber] = Encoder.encodeInt.contramap(_.value) + implicit lazy val get: Get[CardinalNumber] = Meta.IntMeta.get.map(CardinalNumber.apply) + implicit lazy val ordering: Ordering[CardinalNumber] = Ordering.by(_.value) + implicit lazy val put: Put[CardinalNumber] = Meta.IntMeta.put.contramap(_.value) + implicit lazy val text: Text[CardinalNumber] = new Text[CardinalNumber] { + override def unsafeEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala new file mode 100644 index 0000000000..b3e025d1c3 --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import doobie.util.meta.Meta +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.character_data` + * No constraint + */ +case class CharacterData(value: String) +object CharacterData { + implicit lazy val arrayGet: Get[Array[CharacterData]] = adventureworks.StringArrayMeta.get.map(_.map(CharacterData.apply)) + implicit lazy val arrayPut: Put[Array[CharacterData]] = adventureworks.StringArrayMeta.put.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CharacterData, String] = Bijection[CharacterData, String](_.value)(CharacterData.apply) + implicit lazy val decoder: Decoder[CharacterData] = Decoder.decodeString.map(CharacterData.apply) + implicit lazy val encoder: Encoder[CharacterData] = Encoder.encodeString.contramap(_.value) + implicit lazy val get: Get[CharacterData] = Meta.StringMeta.get.map(CharacterData.apply) + implicit lazy val ordering: Ordering[CharacterData] = Ordering.by(_.value) + implicit lazy val put: Put[CharacterData] = Meta.StringMeta.put.contramap(_.value) + implicit lazy val text: Text[CharacterData] = new Text[CharacterData] { + override def unsafeEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala new file mode 100644 index 0000000000..ece5efcd6c --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import doobie.util.meta.Meta +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.sql_identifier` + * No constraint + */ +case class SqlIdentifier(value: String) +object SqlIdentifier { + implicit lazy val arrayGet: Get[Array[SqlIdentifier]] = adventureworks.StringArrayMeta.get.map(_.map(SqlIdentifier.apply)) + implicit lazy val arrayPut: Put[Array[SqlIdentifier]] = adventureworks.StringArrayMeta.put.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[SqlIdentifier, String] = Bijection[SqlIdentifier, String](_.value)(SqlIdentifier.apply) + implicit lazy val decoder: Decoder[SqlIdentifier] = Decoder.decodeString.map(SqlIdentifier.apply) + implicit lazy val encoder: Encoder[SqlIdentifier] = Encoder.encodeString.contramap(_.value) + implicit lazy val get: Get[SqlIdentifier] = Meta.StringMeta.get.map(SqlIdentifier.apply) + implicit lazy val ordering: Ordering[SqlIdentifier] = Ordering.by(_.value) + implicit lazy val put: Put[SqlIdentifier] = Meta.StringMeta.put.contramap(_.value) + implicit lazy val text: Text[SqlIdentifier] = new Text[SqlIdentifier] { + override def unsafeEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala new file mode 100644 index 0000000000..63904efc4a --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import adventureworks.customtypes.TypoInstant +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.time_stamp` + * No constraint + */ +case class TimeStamp(value: TypoInstant) +object TimeStamp { + implicit lazy val arrayGet: Get[Array[TimeStamp]] = TypoInstant.arrayGet.map(_.map(TimeStamp.apply)) + implicit lazy val arrayPut: Put[Array[TimeStamp]] = TypoInstant.arrayPut.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[TimeStamp, TypoInstant] = Bijection[TimeStamp, TypoInstant](_.value)(TimeStamp.apply) + implicit lazy val decoder: Decoder[TimeStamp] = TypoInstant.decoder.map(TimeStamp.apply) + implicit lazy val encoder: Encoder[TimeStamp] = TypoInstant.encoder.contramap(_.value) + implicit lazy val get: Get[TimeStamp] = TypoInstant.get.map(TimeStamp.apply) + implicit def ordering(implicit O0: Ordering[TypoInstant]): Ordering[TimeStamp] = Ordering.by(_.value) + implicit lazy val put: Put[TimeStamp] = TypoInstant.put.contramap(_.value) + implicit lazy val text: Text[TimeStamp] = new Text[TimeStamp] { + override def unsafeEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala new file mode 100644 index 0000000000..caa7bdddae --- /dev/null +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala @@ -0,0 +1,34 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import doobie.postgres.Text +import doobie.util.Get +import doobie.util.Put +import doobie.util.meta.Meta +import io.circe.Decoder +import io.circe.Encoder +import typo.dsl.Bijection + +/** Domain `information_schema.yes_or_no` + * Constraint: CHECK (((VALUE)::text = ANY ((ARRAY['YES'::character varying, 'NO'::character varying])::text[]))) + */ +case class YesOrNo(value: String) +object YesOrNo { + implicit lazy val arrayGet: Get[Array[YesOrNo]] = adventureworks.StringArrayMeta.get.map(_.map(YesOrNo.apply)) + implicit lazy val arrayPut: Put[Array[YesOrNo]] = adventureworks.StringArrayMeta.put.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[YesOrNo, String] = Bijection[YesOrNo, String](_.value)(YesOrNo.apply) + implicit lazy val decoder: Decoder[YesOrNo] = Decoder.decodeString.map(YesOrNo.apply) + implicit lazy val encoder: Encoder[YesOrNo] = Encoder.encodeString.contramap(_.value) + implicit lazy val get: Get[YesOrNo] = Meta.StringMeta.get.map(YesOrNo.apply) + implicit lazy val ordering: Ordering[YesOrNo] = Ordering.by(_.value) + implicit lazy val put: Put[YesOrNo] = Meta.StringMeta.put.contramap(_.value) + implicit lazy val text: Text[YesOrNo] = new Text[YesOrNo] { + override def unsafeEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala index 6489fe2c9b..9e52b7795f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala @@ -30,4 +30,7 @@ trait AddressRepo { def update: UpdateBuilder[AddressFields, AddressRow] def update(row: AddressRow): ConnectionIO[Boolean] def upsert(unsaved: AddressRow): ConnectionIO[AddressRow] + def upsertBatch(unsaved: List[AddressRow]): Stream[ConnectionIO, AddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, AddressRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala index 68ecf92e23..fd489d8c52 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoBytea import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.stateprovince.StateprovinceId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -147,4 +149,43 @@ class AddressRepoImpl extends AddressRepo { returning "addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate"::text """.query(using AddressRow.read).unique } + override def upsertBatch(unsaved: List[AddressRow]): Stream[ConnectionIO, AddressRow] = { + Update[AddressRow]( + s"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + values (?::int4,?,?,?,?::int4,?,?::bytea,?::uuid,?::timestamp) + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate"::text""" + )(using AddressRow.write) + .updateManyWithGeneratedKeys[AddressRow]("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, AddressRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, AddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table address_TEMP (like person.address) on commit drop".update.run + _ <- new FragmentOps(sql"""copy address_TEMP("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using AddressRow.text) + res <- sql"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + select * from address_TEMP + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table address_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala index 2c7ccd842c..756b29b0a3 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala @@ -105,4 +105,23 @@ class AddressRepoMock(toRow: Function1[AddressRowUnsaved, AddressRow], unsaved } } + override def upsertBatch(unsaved: List[AddressRow]): Stream[ConnectionIO, AddressRow] = { + Stream.emits { + unsaved.map { row => + map += (row.addressid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, AddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.addressid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRow.scala index a9b561f1f0..e025c97675 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/address/AddressRow.scala @@ -15,6 +15,7 @@ import adventureworks.person.stateprovince.StateprovinceId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -96,4 +97,38 @@ object AddressRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[AddressRow] = new Write[AddressRow]( + puts = List((AddressId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.NoNulls), + (StateprovinceId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoBytea.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.addressid, x.addressline1, x.addressline2, x.city, x.stateprovinceid, x.postalcode, x.spatiallocation, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + AddressId.put.unsafeSetNonNullable(rs, i + 0, a.addressid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.addressline1) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.addressline2) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.city) + StateprovinceId.put.unsafeSetNonNullable(rs, i + 4, a.stateprovinceid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.postalcode) + TypoBytea.put.unsafeSetNullable(rs, i + 6, a.spatiallocation) + TypoUUID.put.unsafeSetNonNullable(rs, i + 7, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + AddressId.put.unsafeUpdateNonNullable(ps, i + 0, a.addressid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.addressline1) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.addressline2) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.city) + StateprovinceId.put.unsafeUpdateNonNullable(ps, i + 4, a.stateprovinceid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.postalcode) + TypoBytea.put.unsafeUpdateNullable(ps, i + 6, a.spatiallocation) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 7, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala index b1f569f9f1..989b32ee78 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala @@ -30,4 +30,7 @@ trait AddresstypeRepo { def 
update: UpdateBuilder[AddresstypeFields, AddresstypeRow] def update(row: AddresstypeRow): ConnectionIO[Boolean] def upsert(unsaved: AddresstypeRow): ConnectionIO[AddresstypeRow] + def upsertBatch(unsaved: List[AddresstypeRow]): Stream[ConnectionIO, AddresstypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, AddresstypeRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala index 6903106a03..1d2792a48c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -125,4 +127,33 @@ class AddresstypeRepoImpl extends AddresstypeRepo { returning "addresstypeid", "name", "rowguid", "modifieddate"::text """.query(using AddresstypeRow.read).unique } + override def upsertBatch(unsaved: List[AddresstypeRow]): Stream[ConnectionIO, AddresstypeRow] = { + Update[AddresstypeRow]( + s"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::uuid,?::timestamp) + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "addresstypeid", "name", "rowguid", "modifieddate"::text""" + )(using AddresstypeRow.write) + .updateManyWithGeneratedKeys[AddresstypeRow]("addresstypeid", "name", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, AddresstypeRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, AddresstypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table addresstype_TEMP (like person.addresstype) on commit drop".update.run + _ <- new FragmentOps(sql"""copy addresstype_TEMP("addresstypeid", "name", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using AddresstypeRow.text) + res <- sql"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + select * from addresstype_TEMP + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table addresstype_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala index dfac5baa3b..966ce5824a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala @@ -105,4 +105,23 @@ class AddresstypeRepoMock(toRow: Function1[AddresstypeRowUnsaved, AddresstypeRow unsaved } } + override def upsertBatch(unsaved: List[AddresstypeRow]): Stream[ConnectionIO, AddresstypeRow] = { + Stream.emits { + unsaved.map { row => + map += (row.addresstypeid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, AddresstypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.addresstypeid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRow.scala index 5b299747b8..cbc276eb14 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -63,4 +64,23 @@ object AddresstypeRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[AddresstypeRow] = new Write[AddresstypeRow]( + puts = List((AddresstypeId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.addresstypeid, x.name, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + AddresstypeId.put.unsafeSetNonNullable(rs, i + 0, a.addresstypeid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoUUID.put.unsafeSetNonNullable(rs, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + AddresstypeId.put.unsafeUpdateNonNullable(ps, i + 0, a.addresstypeid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + 
TypoUUID.put.unsafeUpdateNonNullable(ps, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala index 6394b5241b..19824a3860 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala @@ -30,4 +30,7 @@ trait BusinessentityRepo { def update: UpdateBuilder[BusinessentityFields, BusinessentityRow] def update(row: BusinessentityRow): ConnectionIO[Boolean] def upsert(unsaved: BusinessentityRow): ConnectionIO[BusinessentityRow] + def upsertBatch(unsaved: List[BusinessentityRow]): Stream[ConnectionIO, BusinessentityRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala index f0efbba264..0e6e7f8b14 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala @@ -10,12 +10,14 @@ package businessentity import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -120,4 +122,31 @@ class BusinessentityRepoImpl extends BusinessentityRepo { returning "businessentityid", "rowguid", "modifieddate"::text """.query(using BusinessentityRow.read).unique } + override def upsertBatch(unsaved: List[BusinessentityRow]): Stream[ConnectionIO, BusinessentityRow] = { + Update[BusinessentityRow]( + s"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + values (?::int4,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "rowguid", "modifieddate"::text""" + )(using BusinessentityRow.write) + .updateManyWithGeneratedKeys[BusinessentityRow]("businessentityid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, BusinessentityRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table businessentity_TEMP (like person.businessentity) on commit drop".update.run + _ <- new FragmentOps(sql"""copy businessentity_TEMP("businessentityid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using BusinessentityRow.text) + res <- sql"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + select * from businessentity_TEMP + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentity_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala index a17b71c53b..e094cc6858 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala @@ -105,4 +105,23 @@ class BusinessentityRepoMock(toRow: Function1[BusinessentityRowUnsaved, Business unsaved } } + override def upsertBatch(unsaved: List[BusinessentityRow]): Stream[ConnectionIO, BusinessentityRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRow.scala index c85943b51b..b6f78424d5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object BusinessentityRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[BusinessentityRow] = new Write[BusinessentityRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 1, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 1, a.rowguid) + 
TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala index 1dca0ffc38..933d24768e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala @@ -30,4 +30,7 @@ trait BusinessentityaddressRepo { def update: UpdateBuilder[BusinessentityaddressFields, BusinessentityaddressRow] def update(row: BusinessentityaddressRow): ConnectionIO[Boolean] def upsert(unsaved: BusinessentityaddressRow): ConnectionIO[BusinessentityaddressRow] + def upsertBatch(unsaved: List[BusinessentityaddressRow]): Stream[ConnectionIO, BusinessentityaddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityaddressRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala index 6b5c39ede2..1784258a2e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala @@ -13,12 +13,14 @@ import adventureworks.customtypes.TypoUUID import adventureworks.person.address.AddressId import adventureworks.person.addresstype.AddresstypeId import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -140,4 +142,31 @@ class BusinessentityaddressRepoImpl extends BusinessentityaddressRepo { returning "businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate"::text """.query(using BusinessentityaddressRow.read).unique } + override def upsertBatch(unsaved: List[BusinessentityaddressRow]): Stream[ConnectionIO, BusinessentityaddressRow] = { + Update[BusinessentityaddressRow]( + s"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + values (?::int4,?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate"::text""" + )(using BusinessentityaddressRow.write) + .updateManyWithGeneratedKeys[BusinessentityaddressRow]("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, BusinessentityaddressRow.read) + } + /* NOTE: this 
functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityaddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table businessentityaddress_TEMP (like person.businessentityaddress) on commit drop".update.run + _ <- new FragmentOps(sql"""copy businessentityaddress_TEMP("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using BusinessentityaddressRow.text) + res <- sql"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + select * from businessentityaddress_TEMP + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentityaddress_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala index 23237f5438..8605223dea 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala @@ -105,4 +105,23 @@ class BusinessentityaddressRepoMock(toRow: Function1[BusinessentityaddressRowUns unsaved } } + override def upsertBatch(unsaved: List[BusinessentityaddressRow]): Stream[ConnectionIO, BusinessentityaddressRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentityaddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRow.scala index bc0366f0c9..0071d0b70a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRow.scala @@ -16,6 +16,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -76,4 +77,26 @@ object BusinessentityaddressRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[BusinessentityaddressRow] = new Write[BusinessentityaddressRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (AddressId.put, Nullability.NoNulls), + (AddresstypeId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.addressid, x.addresstypeid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + AddressId.put.unsafeSetNonNullable(rs, i + 1, a.addressid) + AddresstypeId.put.unsafeSetNonNullable(rs, i + 2, a.addresstypeid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + AddressId.put.unsafeUpdateNonNullable(ps, i + 1, a.addressid) + AddresstypeId.put.unsafeUpdateNonNullable(ps, i + 2, a.addresstypeid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala index abffd5b6f6..161a0a1b05 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala @@ -30,4 +30,7 @@ trait BusinessentitycontactRepo { def update: UpdateBuilder[BusinessentitycontactFields, BusinessentitycontactRow] def update(row: BusinessentitycontactRow): ConnectionIO[Boolean] def upsert(unsaved: BusinessentitycontactRow): ConnectionIO[BusinessentitycontactRow] + def upsertBatch(unsaved: List[BusinessentitycontactRow]): Stream[ConnectionIO, BusinessentitycontactRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentitycontactRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala index 85342187b6..8069625d6e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.person.contacttype.ContacttypeId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -139,4 +141,31 @@ class BusinessentitycontactRepoImpl extends BusinessentitycontactRepo { returning "businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate"::text """.query(using BusinessentitycontactRow.read).unique } + override def upsertBatch(unsaved: List[BusinessentitycontactRow]): Stream[ConnectionIO, BusinessentitycontactRow] = { + Update[BusinessentitycontactRow]( + s"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + values (?::int4,?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate"::text""" + )(using BusinessentitycontactRow.write) + .updateManyWithGeneratedKeys[BusinessentitycontactRow]("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, BusinessentitycontactRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentitycontactRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table businessentitycontact_TEMP (like person.businessentitycontact) on commit drop".update.run + _ <- new FragmentOps(sql"""copy businessentitycontact_TEMP("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using BusinessentitycontactRow.text) + res <- sql"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + select * from businessentitycontact_TEMP + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentitycontact_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala index 9bcc51a0e7..3b6ebb89a8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala @@ -105,4 +105,23 @@ class BusinessentitycontactRepoMock(toRow: Function1[BusinessentitycontactRowUns unsaved } } + override def upsertBatch(unsaved: List[BusinessentitycontactRow]): Stream[ConnectionIO, BusinessentitycontactRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BusinessentitycontactRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRow.scala index d1abcff4c9..848a0fb869 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRow.scala @@ -15,6 +15,7 @@ import adventureworks.person.contacttype.ContacttypeId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -75,4 +76,26 @@ object BusinessentitycontactRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[BusinessentitycontactRow] = new Write[BusinessentitycontactRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (ContacttypeId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.personid, x.contacttypeid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 1, a.personid) + ContacttypeId.put.unsafeSetNonNullable(rs, i + 2, a.contacttypeid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 1, a.personid) + ContacttypeId.put.unsafeUpdateNonNullable(ps, i + 2, a.contacttypeid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala index 5d7e93a36d..ab505d336d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala @@ -30,4 +30,7 @@ trait ContacttypeRepo { def update: UpdateBuilder[ContacttypeFields, ContacttypeRow] def update(row: ContacttypeRow): ConnectionIO[Boolean] def upsert(unsaved: ContacttypeRow): ConnectionIO[ContacttypeRow] + def upsertBatch(unsaved: List[ContacttypeRow]): Stream[ConnectionIO, ContacttypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ContacttypeRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala index 9b70f69bb7..851c8063ff 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala @@ -10,12 +10,14 @@ package contacttype import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -117,4 +119,31 @@ class ContacttypeRepoImpl extends ContacttypeRepo { returning "contacttypeid", "name", "modifieddate"::text """.query(using ContacttypeRow.read).unique } + override def upsertBatch(unsaved: List[ContacttypeRow]): Stream[ConnectionIO, ContacttypeRow] = { + Update[ContacttypeRow]( + s"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + values (?::int4,?::varchar,?::timestamp) + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "contacttypeid", "name", "modifieddate"::text""" + )(using ContacttypeRow.write) + .updateManyWithGeneratedKeys[ContacttypeRow]("contacttypeid", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, ContacttypeRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ContacttypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table contacttype_TEMP (like person.contacttype) on commit drop".update.run + _ <- new FragmentOps(sql"""copy contacttype_TEMP("contacttypeid", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ContacttypeRow.text) + res <- sql"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + select * from contacttype_TEMP + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table contacttype_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala index 0e86ae9c9c..5eb28e2c59 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala @@ -105,4 +105,23 @@ class ContacttypeRepoMock(toRow: Function1[ContacttypeRowUnsaved, ContacttypeRow unsaved } } + override def upsertBatch(unsaved: List[ContacttypeRow]): Stream[ConnectionIO, ContacttypeRow] = { + Stream.emits { + unsaved.map { row => + map += (row.contacttypeid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ContacttypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.contacttypeid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRow.scala index a834a834db..d50da8fef4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object ContacttypeRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ContacttypeRow] = new Write[ContacttypeRow]( + puts = List((ContacttypeId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.contacttypeid, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + ContacttypeId.put.unsafeSetNonNullable(rs, i + 0, a.contacttypeid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ContacttypeId.put.unsafeUpdateNonNullable(ps, i + 0, a.contacttypeid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala index b4e96ced88..b642804244 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala @@ -30,4 +30,7 @@ trait CountryregionRepo { def update: UpdateBuilder[CountryregionFields, CountryregionRow] def update(row: CountryregionRow): ConnectionIO[Boolean] def upsert(unsaved: CountryregionRow): ConnectionIO[CountryregionRow] + def upsertBatch(unsaved: List[CountryregionRow]): Stream[ConnectionIO, CountryregionRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregionRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala index 920daa6e36..b14d6433e4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala @@ -10,12 +10,14 @@ package countryregion import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -114,4 +116,31 @@ class CountryregionRepoImpl extends CountryregionRepo { returning "countryregioncode", "name", "modifieddate"::text """.query(using CountryregionRow.read).unique } + override def upsertBatch(unsaved: List[CountryregionRow]): Stream[ConnectionIO, CountryregionRow] = { + Update[CountryregionRow]( + s"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + values (?,?::varchar,?::timestamp) + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "countryregioncode", "name", "modifieddate"::text""" + )(using CountryregionRow.write) + .updateManyWithGeneratedKeys[CountryregionRow]("countryregioncode", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, CountryregionRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregionRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table countryregion_TEMP (like person.countryregion) on commit drop".update.run + _ <- new FragmentOps(sql"""copy countryregion_TEMP("countryregioncode", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CountryregionRow.text) + res <- sql"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + select * from countryregion_TEMP + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregion_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala index 8b6584f11c..0d0791daac 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala @@ -105,4 +105,23 @@ class CountryregionRepoMock(toRow: Function1[CountryregionRowUnsaved, Countryreg unsaved } } + override def upsertBatch(unsaved: List[CountryregionRow]): Stream[ConnectionIO, CountryregionRow] = { + Stream.emits { + unsaved.map { row => + map += (row.countryregioncode -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregionRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.countryregioncode -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRow.scala index 0a640016bc..83d4aa4ccf 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -55,4 +56,20 @@ object CountryregionRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CountryregionRow] = new Write[CountryregionRow]( + puts = List((CountryregionId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.countryregioncode, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + CountryregionId.put.unsafeSetNonNullable(rs, i + 0, a.countryregioncode) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CountryregionId.put.unsafeUpdateNonNullable(ps, i + 0, a.countryregioncode) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, 
a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala index d64e2df5c5..8793946663 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala @@ -30,4 +30,7 @@ trait EmailaddressRepo { def update: UpdateBuilder[EmailaddressFields, EmailaddressRow] def update(row: EmailaddressRow): ConnectionIO[Boolean] def upsert(unsaved: EmailaddressRow): ConnectionIO[EmailaddressRow] + def upsertBatch(unsaved: List[EmailaddressRow]): Stream[ConnectionIO, EmailaddressRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, EmailaddressRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala index 6052d1f2f0..8bf8b0412a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -142,4 +144,33 @@ class EmailaddressRepoImpl extends EmailaddressRepo { returning "businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate"::text """.query(using EmailaddressRow.read).unique } + override def upsertBatch(unsaved: List[EmailaddressRow]): Stream[ConnectionIO, EmailaddressRow] = { + Update[EmailaddressRow]( + s"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + values (?::int4,?::int4,?,?::uuid,?::timestamp) + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate"::text""" + )(using EmailaddressRow.write) + .updateManyWithGeneratedKeys[EmailaddressRow]("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, EmailaddressRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmailaddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table emailaddress_TEMP (like person.emailaddress) on commit drop".update.run + _ <- new FragmentOps(sql"""copy emailaddress_TEMP("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using EmailaddressRow.text) + res <- sql"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + select * from emailaddress_TEMP + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table emailaddress_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala index d8ffa5e1bc..ca0b8e0987 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala @@ -105,4 +105,23 @@ class EmailaddressRepoMock(toRow: Function1[EmailaddressRowUnsaved, Emailaddress unsaved } } + override def upsertBatch(unsaved: List[EmailaddressRow]): Stream[ConnectionIO, EmailaddressRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, EmailaddressRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRow.scala index 37f5c3206f..ed3de2a666 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -74,4 +75,26 @@ object EmailaddressRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[EmailaddressRow] = new Write[EmailaddressRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.emailaddressid, x.emailaddress, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.emailaddressid) + 
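
The COPY-based upsertStreaming above only works when all three statements share one connection and one transaction: the temporary table is created with on commit drop, so in auto-commit mode it would vanish before the COPY and the insert-on-conflict run. A minimal usage sketch, assuming a cats-effect Transactor[IO] named xa and an already-built list of EmailaddressRow values (both hypothetical, not part of this patch):

import adventureworks.person.emailaddress.{EmailaddressRepo, EmailaddressRow}
import cats.effect.IO
import doobie.Transactor
import doobie.free.connection.ConnectionIO
import doobie.implicits.*
import fs2.Stream

def bulkUpsertEmails(xa: Transactor[IO], repo: EmailaddressRepo, rows: List[EmailaddressRow]): IO[Int] = {
  // lift the rows into a Stream[ConnectionIO, EmailaddressRow]; in real use the stream
  // could just as well be produced incrementally from a file or another query
  val stream: Stream[ConnectionIO, EmailaddressRow] = Stream.emits(rows).covary[ConnectionIO]
  // transact runs the whole ConnectionIO on one connection in a single transaction,
  // which is exactly what the temp table + COPY + insert-on-conflict sequence needs
  repo.upsertStreaming(stream, batchSize = 10000).transact(xa)
}
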
Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.emailaddress) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.emailaddressid) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.emailaddress) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala index 3a5d28d899..a5d017e756 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala @@ -31,4 +31,7 @@ trait PasswordRepo { def update: UpdateBuilder[PasswordFields, PasswordRow] def update(row: PasswordRow): ConnectionIO[Boolean] def upsert(unsaved: PasswordRow): ConnectionIO[PasswordRow] + def upsertBatch(unsaved: List[PasswordRow]): Stream[ConnectionIO, PasswordRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PasswordRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala index 97ae0612d0..7695a3c555 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -127,4 +129,35 @@ class PasswordRepoImpl extends PasswordRepo { returning "businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate"::text """.query(using PasswordRow.read).unique } + override def upsertBatch(unsaved: List[PasswordRow]): Stream[ConnectionIO, PasswordRow] = { + Update[PasswordRow]( + s"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + values (?::int4,?,?,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate"::text""" + )(using PasswordRow.write) + 
.updateManyWithGeneratedKeys[PasswordRow]("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, PasswordRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PasswordRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table password_TEMP (like person.password) on commit drop".update.run + _ <- new FragmentOps(sql"""copy password_TEMP("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PasswordRow.text) + res <- sql"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + select * from password_TEMP + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table password_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala index 7958d68ec8..932570e1ad 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala @@ -106,4 +106,23 @@ class PasswordRepoMock(toRow: Function1[PasswordRowUnsaved, PasswordRow], unsaved } } + override def upsertBatch(unsaved: List[PasswordRow]): Stream[ConnectionIO, PasswordRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PasswordRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRow.scala index 20603b218d..604145d17b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/password/PasswordRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -69,4 +70,26 @@ object PasswordRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PasswordRow] = new Write[PasswordRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.passwordhash, x.passwordsalt, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.passwordhash) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.passwordsalt) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.passwordhash) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.passwordsalt) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala index 4fbf1b2a8a..2fd452cc03 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala @@ -31,4 +31,7 @@ trait PersonRepo { def update: UpdateBuilder[PersonFields, PersonRow] def update(row: PersonRow): ConnectionIO[Boolean] def upsert(unsaved: PersonRow): ConnectionIO[PersonRow] + def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
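
The mock repositories shown here keep rows in an in-memory map keyed by the primary key (businessentityid for person.password), so a second upsert of the same key overwrites the first, mirroring the on-conflict behaviour of the real implementation. A sketch of a test helper written against the trait, so the same code can run against PasswordRepoImpl (Postgres) or PasswordRepoMock (the map); rows is a hypothetical list built by the test:

import adventureworks.person.password.{PasswordRepo, PasswordRow}
import doobie.free.connection.ConnectionIO
import fs2.Stream

// Returns the number of rows upserted; with the mock this only mutates the in-memory map.
def seedPasswords(repo: PasswordRepo, rows: List[PasswordRow]): ConnectionIO[Int] =
  repo.upsertStreaming(Stream.emits(rows).covary[ConnectionIO], batchSize = 1000)
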
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala index 67c2319e89..9744f786f8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala @@ -15,6 +15,7 @@ import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Name import adventureworks.public.NameStyle import adventureworks.userdefined.FirstName +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -22,6 +23,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -169,4 +171,51 @@ class PersonRepoImpl extends PersonRepo { returning "businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate"::text """.query(using PersonRow.read).unique } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Update[PersonRow]( + s"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + values (?::int4,?::bpchar,?::bool,?,?::varchar,?::varchar,?::varchar,?,?::int4,?::xml,?::xml,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate"::text""" + )(using PersonRow.write) + .updateManyWithGeneratedKeys[PersonRow]("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, PersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
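
upsertBatch above goes through doobie's Update[PersonRow] and updateManyWithGeneratedKeys, so the rows are sent through a single prepared statement as a batch and the returning clause streams back each row as the database actually stored it. A minimal sketch of consuming that stream, assuming a hypothetical Transactor[IO] named xa and the generated PersonRepoImpl:

import adventureworks.person.person.{PersonRepoImpl, PersonRow}
import cats.effect.IO
import doobie.Transactor
import doobie.implicits.*

// Compile the returned Stream[ConnectionIO, PersonRow] to a List and run it in one transaction.
def upsertPeople(xa: Transactor[IO], repo: PersonRepoImpl, people: List[PersonRow]): IO[List[PersonRow]] =
  repo.upsertBatch(people).compile.toList.transact(xa)
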
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table person_TEMP (like person.person) on commit drop".update.run + _ <- new FragmentOps(sql"""copy person_TEMP("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PersonRow.text) + res <- sql"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + select * from person_TEMP + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table person_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala index 6fb77b058f..5ab392d967 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala @@ -106,4 +106,23 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], unsaved } } + override def upsertBatch(unsaved: List[PersonRow]): Stream[ConnectionIO, PersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRow.scala index 77761b2782..e20a68719e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/person/PersonRow.scala @@ -18,6 +18,7 @@ import adventureworks.userdefined.FirstName import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -126,4 +127,50 @@ object PersonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PersonRow] = new Write[PersonRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (NameStyle.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (/* user-picked */ FirstName.put, Nullability.NoNulls), + (Name.put, Nullability.Nullable), + (Name.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoXml.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.persontype, x.namestyle, x.title, x.firstname, x.middlename, x.lastname, x.suffix, x.emailpromotion, x.additionalcontactinfo, x.demographics, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.persontype) + NameStyle.put.unsafeSetNonNullable(rs, i + 2, a.namestyle) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 3, a.title) + /* user-picked */ FirstName.put.unsafeSetNonNullable(rs, i + 4, a.firstname) + Name.put.unsafeSetNullable(rs, i + 5, a.middlename) + Name.put.unsafeSetNonNullable(rs, i + 6, a.lastname) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 7, a.suffix) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 8, a.emailpromotion) + TypoXml.put.unsafeSetNullable(rs, i + 9, a.additionalcontactinfo) + TypoXml.put.unsafeSetNullable(rs, i + 10, a.demographics) + TypoUUID.put.unsafeSetNonNullable(rs, i + 11, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 12, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.persontype) + NameStyle.put.unsafeUpdateNonNullable(ps, i + 2, a.namestyle) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 3, a.title) + /* user-picked */ FirstName.put.unsafeUpdateNonNullable(ps, i + 4, a.firstname) + Name.put.unsafeUpdateNullable(ps, i + 5, a.middlename) + Name.put.unsafeUpdateNonNullable(ps, i + 6, a.lastname) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 7, a.suffix) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 8, a.emailpromotion) + TypoXml.put.unsafeUpdateNullable(ps, i + 9, a.additionalcontactinfo) + 
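
This hand-written Write[PersonRow] is what lets Update[PersonRow] above bind all thirteen columns of a row positionally: puts records each column's Put and nullability, toList flattens a row, and the two unsafe variants write a row into a prepared statement or an updatable result set. A small sketch of the same instance being consumed directly, assuming rows: List[PersonRow] and ignoring the explicit :: casts and the on-conflict clause that the generated upsertBatch adds:

import adventureworks.person.person.PersonRow
import cats.instances.list.catsStdInstancesForList
import doobie.free.connection.ConnectionIO
import doobie.util.update.Update

// Plain batched insert driven by PersonRow.write; returns the number of inserted rows.
def insertMany(rows: List[PersonRow]): ConnectionIO[Int] =
  Update[PersonRow](
    """insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate")
      |values (?,?,?,?,?,?,?,?,?,?,?,?,?)""".stripMargin
  )(using PersonRow.write)
    .updateMany(rows)(using catsStdInstancesForList)
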
TypoXml.put.unsafeUpdateNullable(ps, i + 10, a.demographics) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 11, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 12, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala index 0cecdafb0c..f6d010f6e0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala @@ -30,4 +30,7 @@ trait PersonphoneRepo { def update: UpdateBuilder[PersonphoneFields, PersonphoneRow] def update(row: PersonphoneRow): ConnectionIO[Boolean] def upsert(unsaved: PersonphoneRow): ConnectionIO[PersonphoneRow] + def upsertBatch(unsaved: List[PersonphoneRow]): Stream[ConnectionIO, PersonphoneRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersonphoneRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala index a9fecce74e..e3ca1c18cc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.person.phonenumbertype.PhonenumbertypeId import adventureworks.public.Phone +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,29 @@ class PersonphoneRepoImpl extends PersonphoneRepo { returning "businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate"::text """.query(using PersonphoneRow.read).unique } + override def upsertBatch(unsaved: List[PersonphoneRow]): Stream[ConnectionIO, PersonphoneRow] = { + Update[PersonphoneRow]( + s"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + values (?::int4,?::varchar,?::int4,?::timestamp) + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate"::text""" + )(using PersonphoneRow.write) + .updateManyWithGeneratedKeys[PersonphoneRow]("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate")(unsaved)(using catsStdInstancesForList, PersonphoneRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonphoneRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table personphone_TEMP (like person.personphone) on commit drop".update.run + _ <- new FragmentOps(sql"""copy personphone_TEMP("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PersonphoneRow.text) + res <- sql"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + select * from personphone_TEMP + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personphone_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala index bb3bc4b6d9..aefd0e44ce 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala @@ -105,4 +105,23 @@ class PersonphoneRepoMock(toRow: Function1[PersonphoneRowUnsaved, PersonphoneRow unsaved } } + override def upsertBatch(unsaved: List[PersonphoneRow]): Stream[ConnectionIO, PersonphoneRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersonphoneRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRow.scala index e642cdbc73..84a9f03f4e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Phone import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -68,4 +69,23 @@ object PersonphoneRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PersonphoneRow] = new Write[PersonphoneRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Phone.put, Nullability.NoNulls), + (PhonenumbertypeId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.phonenumber, x.phonenumbertypeid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Phone.put.unsafeSetNonNullable(rs, i + 1, a.phonenumber) + PhonenumbertypeId.put.unsafeSetNonNullable(rs, i + 2, a.phonenumbertypeid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, 
i + 0, a.businessentityid) + Phone.put.unsafeUpdateNonNullable(ps, i + 1, a.phonenumber) + PhonenumbertypeId.put.unsafeUpdateNonNullable(ps, i + 2, a.phonenumbertypeid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala index 18f72dd853..e7ba170091 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala @@ -30,4 +30,7 @@ trait PhonenumbertypeRepo { def update: UpdateBuilder[PhonenumbertypeFields, PhonenumbertypeRow] def update(row: PhonenumbertypeRow): ConnectionIO[Boolean] def upsert(unsaved: PhonenumbertypeRow): ConnectionIO[PhonenumbertypeRow] + def upsertBatch(unsaved: List[PhonenumbertypeRow]): Stream[ConnectionIO, PhonenumbertypeRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PhonenumbertypeRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala index c7972999b1..35b4e44d9d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala @@ -10,12 +10,14 @@ package phonenumbertype import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -117,4 +119,31 @@ class PhonenumbertypeRepoImpl extends PhonenumbertypeRepo { returning "phonenumbertypeid", "name", "modifieddate"::text """.query(using PhonenumbertypeRow.read).unique } + override def upsertBatch(unsaved: List[PhonenumbertypeRow]): Stream[ConnectionIO, PhonenumbertypeRow] = { + Update[PhonenumbertypeRow]( + s"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + values (?::int4,?::varchar,?::timestamp) + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "phonenumbertypeid", "name", "modifieddate"::text""" + )(using PhonenumbertypeRow.write) + .updateManyWithGeneratedKeys[PhonenumbertypeRow]("phonenumbertypeid", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, PhonenumbertypeRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PhonenumbertypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table phonenumbertype_TEMP (like person.phonenumbertype) on commit drop".update.run + _ <- new FragmentOps(sql"""copy phonenumbertype_TEMP("phonenumbertypeid", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PhonenumbertypeRow.text) + res <- sql"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + select * from phonenumbertype_TEMP + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table phonenumbertype_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala index 5253a2b2fa..7b2e9627ca 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala @@ -105,4 +105,23 @@ class PhonenumbertypeRepoMock(toRow: Function1[PhonenumbertypeRowUnsaved, Phonen unsaved } } + override def upsertBatch(unsaved: List[PhonenumbertypeRow]): Stream[ConnectionIO, PhonenumbertypeRow] = { + Stream.emits { + unsaved.map { row => + map += (row.phonenumbertypeid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PhonenumbertypeRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.phonenumbertypeid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRow.scala index 31373d96fc..f0ac7984a7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object PhonenumbertypeRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PhonenumbertypeRow] = new Write[PhonenumbertypeRow]( + puts = List((PhonenumbertypeId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.phonenumbertypeid, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + PhonenumbertypeId.put.unsafeSetNonNullable(rs, i + 0, a.phonenumbertypeid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + PhonenumbertypeId.put.unsafeUpdateNonNullable(ps, i + 0, a.phonenumbertypeid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + 
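
Every generated upsert in this patch uses the same Postgres idiom: insert ... on conflict (<primary key>) do update set col = EXCLUDED.col, where EXCLUDED names the row that was proposed for insertion and hit the conflict. In practice the last write for a given key wins. A small sketch against the generated repo, assuming Name wraps a plain string and id/now are supplied by the caller (hypothetical values, not from the patch):

import adventureworks.customtypes.TypoLocalDateTime
import adventureworks.person.phonenumbertype.{PhonenumbertypeId, PhonenumbertypeRepoImpl, PhonenumbertypeRow}
import adventureworks.public.Name
import doobie.free.connection.ConnectionIO

// Upsert the same primary key twice: the second call hits the on-conflict branch and
// overwrites "name" and "modifieddate" with the EXCLUDED (newly proposed) values.
def upsertTwice(repo: PhonenumbertypeRepoImpl, id: PhonenumbertypeId, now: TypoLocalDateTime): ConnectionIO[PhonenumbertypeRow] =
  for {
    _     <- repo.upsert(PhonenumbertypeRow(id, Name("Mobile"), now))
    after <- repo.upsert(PhonenumbertypeRow(id, Name("Satellite"), now))
  } yield after // after.name == Name("Satellite")
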
TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala index eeba397b3f..2cddc6f17e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala @@ -30,4 +30,7 @@ trait StateprovinceRepo { def update: UpdateBuilder[StateprovinceFields, StateprovinceRow] def update(row: StateprovinceRow): ConnectionIO[Boolean] def upsert(unsaved: StateprovinceRow): ConnectionIO[StateprovinceRow] + def upsertBatch(unsaved: List[StateprovinceRow]): Stream[ConnectionIO, StateprovinceRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, StateprovinceRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala index 7320523091..1b63adf346 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.person.countryregion.CountryregionId import adventureworks.public.Flag import adventureworks.public.Name import adventureworks.sales.salesterritory.SalesterritoryId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -148,4 +150,41 @@ class StateprovinceRepoImpl extends StateprovinceRepo { returning "stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate"::text """.query(using StateprovinceRow.read).unique } + override def upsertBatch(unsaved: List[StateprovinceRow]): Stream[ConnectionIO, StateprovinceRow] = { + Update[StateprovinceRow]( + s"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + values (?::int4,?::bpchar,?,?::bool,?::varchar,?::int4,?::uuid,?::timestamp) + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate"::text""" + )(using StateprovinceRow.write) + 
.updateManyWithGeneratedKeys[StateprovinceRow]("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, StateprovinceRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, StateprovinceRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table stateprovince_TEMP (like person.stateprovince) on commit drop".update.run + _ <- new FragmentOps(sql"""copy stateprovince_TEMP("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using StateprovinceRow.text) + res <- sql"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + select * from stateprovince_TEMP + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table stateprovince_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala index fac80aed17..0deaae9e2b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala @@ -105,4 +105,23 @@ class StateprovinceRepoMock(toRow: Function1[StateprovinceRowUnsaved, Stateprovi unsaved } } + override def upsertBatch(unsaved: List[StateprovinceRow]): Stream[ConnectionIO, StateprovinceRow] = { + Stream.emits { + unsaved.map { row => + map += (row.stateprovinceid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, StateprovinceRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.stateprovinceid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRow.scala index de4c2f3189..65fb438764 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRow.scala @@ -17,6 +17,7 @@ import adventureworks.sales.salesterritory.SalesterritoryId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -94,4 +95,35 @@ object StateprovinceRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[StateprovinceRow] = new Write[StateprovinceRow]( + puts = List((StateprovinceId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (CountryregionId.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (SalesterritoryId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.stateprovinceid, x.stateprovincecode, x.countryregioncode, x.isonlystateprovinceflag, x.name, x.territoryid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + StateprovinceId.put.unsafeSetNonNullable(rs, i + 0, a.stateprovinceid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.stateprovincecode) + CountryregionId.put.unsafeSetNonNullable(rs, i + 2, a.countryregioncode) + Flag.put.unsafeSetNonNullable(rs, i + 3, a.isonlystateprovinceflag) + Name.put.unsafeSetNonNullable(rs, i + 4, a.name) + SalesterritoryId.put.unsafeSetNonNullable(rs, i + 5, a.territoryid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 6, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 7, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + StateprovinceId.put.unsafeUpdateNonNullable(ps, i + 0, a.stateprovinceid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.stateprovincecode) + CountryregionId.put.unsafeUpdateNonNullable(ps, i + 2, a.countryregioncode) + Flag.put.unsafeUpdateNonNullable(ps, i + 3, a.isonlystateprovinceflag) + Name.put.unsafeUpdateNonNullable(ps, i + 4, a.name) + SalesterritoryId.put.unsafeUpdateNonNullable(ps, i + 5, a.territoryid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 6, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 7, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala index 89ee581832..b7ef4bd78c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala @@ -30,4 +30,7 @@ trait BillofmaterialsRepo { def update: 
UpdateBuilder[BillofmaterialsFields, BillofmaterialsRow] def update(row: BillofmaterialsRow): ConnectionIO[Boolean] def upsert(unsaved: BillofmaterialsRow): ConnectionIO[BillofmaterialsRow] + def upsertBatch(unsaved: List[BillofmaterialsRow]): Stream[ConnectionIO, BillofmaterialsRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, BillofmaterialsRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala index 01c65f7c3c..efb879c162 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.product.ProductId import adventureworks.production.unitmeasure.UnitmeasureId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -150,4 +152,43 @@ class BillofmaterialsRepoImpl extends BillofmaterialsRepo { returning "billofmaterialsid", "productassemblyid", "componentid", "startdate"::text, "enddate"::text, "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate"::text """.query(using BillofmaterialsRow.read).unique } + override def upsertBatch(unsaved: List[BillofmaterialsRow]): Stream[ConnectionIO, BillofmaterialsRow] = { + Update[BillofmaterialsRow]( + s"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + values (?::int4,?::int4,?::int4,?::timestamp,?::timestamp,?::bpchar,?::int2,?::numeric,?::timestamp) + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + "modifieddate" = EXCLUDED."modifieddate" + returning "billofmaterialsid", "productassemblyid", "componentid", "startdate"::text, "enddate"::text, "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate"::text""" + )(using BillofmaterialsRow.write) + .updateManyWithGeneratedKeys[BillofmaterialsRow]("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate")(unsaved)(using catsStdInstancesForList, BillofmaterialsRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BillofmaterialsRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table billofmaterials_TEMP (like production.billofmaterials) on commit drop".update.run + _ <- new FragmentOps(sql"""copy billofmaterials_TEMP("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using BillofmaterialsRow.text) + res <- sql"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + select * from billofmaterials_TEMP + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table billofmaterials_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala index 5640ab96c8..d79efe0daa 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala @@ -105,4 +105,23 @@ class BillofmaterialsRepoMock(toRow: Function1[BillofmaterialsRowUnsaved, Billof unsaved } } + override def upsertBatch(unsaved: List[BillofmaterialsRow]): Stream[ConnectionIO, BillofmaterialsRow] = { + Stream.emits { + unsaved.map { row => + map += (row.billofmaterialsid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
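
The batchSize parameter (default 10000) is handed to doobie's FragmentOps.copyIn and appears to control how many rows are encoded per chunk while feeding COPY ... FROM STDIN; the number of SQL statements does not change. A sketch of tuning it for a large load, assuming a hypothetical Transactor[IO] named xa and a row stream produced elsewhere:

import adventureworks.production.billofmaterials.{BillofmaterialsRepoImpl, BillofmaterialsRow}
import cats.effect.IO
import doobie.Transactor
import doobie.free.connection.ConnectionIO
import doobie.implicits.*
import fs2.Stream

// Larger chunks trade memory for fewer flushes into the COPY stream.
def loadBillsOfMaterials(xa: Transactor[IO], repo: BillofmaterialsRepoImpl, rows: Stream[ConnectionIO, BillofmaterialsRow]): IO[Int] =
  repo.upsertStreaming(rows, batchSize = 50000).transact(xa)
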
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, BillofmaterialsRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.billofmaterialsid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRow.scala index e8429d3abf..868216fa1c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.unitmeasure.UnitmeasureId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -108,4 +109,38 @@ object BillofmaterialsRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[BillofmaterialsRow] = new Write[BillofmaterialsRow]( + puts = List((Meta.IntMeta.put, Nullability.NoNulls), + (ProductId.put, Nullability.Nullable), + (ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (UnitmeasureId.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.billofmaterialsid, x.productassemblyid, x.componentid, x.startdate, x.enddate, x.unitmeasurecode, x.bomlevel, x.perassemblyqty, x.modifieddate), + unsafeSet = (rs, i, a) => { + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 0, a.billofmaterialsid) + ProductId.put.unsafeSetNullable(rs, i + 1, a.productassemblyid) + ProductId.put.unsafeSetNonNullable(rs, i + 2, a.componentid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 4, a.enddate) + UnitmeasureId.put.unsafeSetNonNullable(rs, i + 5, a.unitmeasurecode) + TypoShort.put.unsafeSetNonNullable(rs, i + 6, a.bomlevel) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.perassemblyqty) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.billofmaterialsid) + ProductId.put.unsafeUpdateNullable(ps, i + 1, a.productassemblyid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 2, a.componentid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 4, a.enddate) + UnitmeasureId.put.unsafeUpdateNonNullable(ps, i + 5, a.unitmeasurecode) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 6, a.bomlevel) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.perassemblyqty) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala index 58a1d85c3d..984d40b8d5 100644 --- 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala @@ -30,4 +30,7 @@ trait CultureRepo { def update: UpdateBuilder[CultureFields, CultureRow] def update(row: CultureRow): ConnectionIO[Boolean] def upsert(unsaved: CultureRow): ConnectionIO[CultureRow] + def upsertBatch(unsaved: List[CultureRow]): Stream[ConnectionIO, CultureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CultureRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala index a58da3339a..7008fcdc11 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala @@ -10,12 +10,14 @@ package culture import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -114,4 +116,31 @@ class CultureRepoImpl extends CultureRepo { returning "cultureid", "name", "modifieddate"::text """.query(using CultureRow.read).unique } + override def upsertBatch(unsaved: List[CultureRow]): Stream[ConnectionIO, CultureRow] = { + Update[CultureRow]( + s"""insert into production.culture("cultureid", "name", "modifieddate") + values (?::bpchar,?::varchar,?::timestamp) + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "cultureid", "name", "modifieddate"::text""" + )(using CultureRow.write) + .updateManyWithGeneratedKeys[CultureRow]("cultureid", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, CultureRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CultureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table culture_TEMP (like production.culture) on commit drop".update.run + _ <- new FragmentOps(sql"""copy culture_TEMP("cultureid", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CultureRow.text) + res <- sql"""insert into production.culture("cultureid", "name", "modifieddate") + select * from culture_TEMP + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table culture_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala index 52aaccb3dd..7d5e188ab2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala @@ -105,4 +105,23 @@ class CultureRepoMock(toRow: Function1[CultureRowUnsaved, CultureRow], unsaved } } + override def upsertBatch(unsaved: List[CultureRow]): Stream[ConnectionIO, CultureRow] = { + Stream.emits { + unsaved.map { row => + map += (row.cultureid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CultureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.cultureid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRow.scala index 0849fe0607..eae0873980 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/culture/CultureRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -55,4 +56,20 @@ object CultureRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CultureRow] = new Write[CultureRow]( + puts = List((CultureId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.cultureid, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + CultureId.put.unsafeSetNonNullable(rs, i + 0, a.cultureid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CultureId.put.unsafeUpdateNonNullable(ps, i + 0, a.cultureid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala index 
463a4d0879..694a5f48d5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala @@ -32,4 +32,7 @@ trait DocumentRepo { def update: UpdateBuilder[DocumentFields, DocumentRow] def update(row: DocumentRow): ConnectionIO[Boolean] def upsert(unsaved: DocumentRow): ConnectionIO[DocumentRow] + def upsertBatch(unsaved: List[DocumentRow]): Stream[ConnectionIO, DocumentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, DocumentRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala index 447c9fef23..b254478794 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Flag +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -177,4 +179,51 @@ class DocumentRepoImpl extends DocumentRepo { returning "title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate"::text, "documentnode" """.query(using DocumentRow.read).unique } + override def upsertBatch(unsaved: List[DocumentRow]): Stream[ConnectionIO, DocumentRow] = { + Update[DocumentRow]( + s"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + values (?,?::int4,?::bool,?,?,?::bpchar,?::int4,?::int2,?,?::bytea,?::uuid,?::timestamp,?) 
+ on conflict ("documentnode") + do update set + "title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate"::text, "documentnode"""" + )(using DocumentRow.write) + .updateManyWithGeneratedKeys[DocumentRow]("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode")(unsaved)(using catsStdInstancesForList, DocumentRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, DocumentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table document_TEMP (like production.document) on commit drop".update.run + _ <- new FragmentOps(sql"""copy document_TEMP("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") from stdin""").copyIn(unsaved, batchSize)(using DocumentRow.text) + res <- sql"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + select * from document_TEMP + on conflict ("documentnode") + do update set + "title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table document_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala index 270536c084..51b5cc7314 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala @@ -109,4 +109,23 @@ class DocumentRepoMock(toRow: Function1[DocumentRowUnsaved, DocumentRow], unsaved } } + override def upsertBatch(unsaved: List[DocumentRow]): Stream[ConnectionIO, DocumentRow] = { + Stream.emits { + unsaved.map { row => + map += (row.documentnode -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, DocumentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.documentnode -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRow.scala index 8f15a5fcd7..99269ec784 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/document/DocumentRow.scala @@ -17,6 +17,7 @@ import adventureworks.public.Flag import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -126,4 +127,50 @@ object DocumentRow { sb.append(Text.DELIMETER) DocumentId.text.unsafeEncode(row.documentnode, sb) } + implicit lazy val write: Write[DocumentRow] = new Write[DocumentRow]( + puts = List((Meta.StringMeta.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoBytea.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (DocumentId.put, Nullability.NoNulls)), + toList = x => List(x.title, x.owner, x.folderflag, x.filename, x.fileextension, x.revision, x.changenumber, x.status, x.documentsummary, x.document, x.rowguid, x.modifieddate, x.documentnode), + unsafeSet = (rs, i, a) => { + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 0, a.title) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 1, a.owner) + Flag.put.unsafeSetNonNullable(rs, i + 2, a.folderflag) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.filename) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.fileextension) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.revision) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 6, a.changenumber) + TypoShort.put.unsafeSetNonNullable(rs, i + 7, a.status) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 8, a.documentsummary) + TypoBytea.put.unsafeSetNullable(rs, i + 9, a.document) + TypoUUID.put.unsafeSetNonNullable(rs, i + 10, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 11, a.modifieddate) + DocumentId.put.unsafeSetNonNullable(rs, i + 12, a.documentnode) + }, + unsafeUpdate = (ps, i, a) => { + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.title) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 1, a.owner) + Flag.put.unsafeUpdateNonNullable(ps, i + 2, a.folderflag) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.filename) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.fileextension) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.revision) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.changenumber) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 7, a.status) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 8, a.documentsummary) + TypoBytea.put.unsafeUpdateNullable(ps, i + 9, a.document) + 
TypoUUID.put.unsafeUpdateNonNullable(ps, i + 10, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 11, a.modifieddate) + DocumentId.put.unsafeUpdateNonNullable(ps, i + 12, a.documentnode) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala index 7e513c17c4..790cef1844 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala @@ -30,4 +30,7 @@ trait IllustrationRepo { def update: UpdateBuilder[IllustrationFields, IllustrationRow] def update(row: IllustrationRow): ConnectionIO[Boolean] def upsert(unsaved: IllustrationRow): ConnectionIO[IllustrationRow] + def upsertBatch(unsaved: List[IllustrationRow]): Stream[ConnectionIO, IllustrationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, IllustrationRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala index 488fb755b5..0a508b1508 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala @@ -10,12 +10,14 @@ package illustration import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoXml +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -117,4 +119,31 @@ class IllustrationRepoImpl extends IllustrationRepo { returning "illustrationid", "diagram", "modifieddate"::text """.query(using IllustrationRow.read).unique } + override def upsertBatch(unsaved: List[IllustrationRow]): Stream[ConnectionIO, IllustrationRow] = { + Update[IllustrationRow]( + s"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + values (?::int4,?::xml,?::timestamp) + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + returning "illustrationid", "diagram", "modifieddate"::text""" + )(using IllustrationRow.write) + .updateManyWithGeneratedKeys[IllustrationRow]("illustrationid", "diagram", "modifieddate")(unsaved)(using catsStdInstancesForList, IllustrationRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
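rows are streamed to the server over the Postgres COPY protocol (FragmentOps.copyIn with IllustrationRow.text, in chunks of batchSize), and under auto-commit the on-commit-drop temporary table would already be dropped before the COPY could write into it: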
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, IllustrationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table illustration_TEMP (like production.illustration) on commit drop".update.run + _ <- new FragmentOps(sql"""copy illustration_TEMP("illustrationid", "diagram", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using IllustrationRow.text) + res <- sql"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + select * from illustration_TEMP + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table illustration_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala index 8925569c55..635eb931b6 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala @@ -105,4 +105,23 @@ class IllustrationRepoMock(toRow: Function1[IllustrationRowUnsaved, Illustration unsaved } } + override def upsertBatch(unsaved: List[IllustrationRow]): Stream[ConnectionIO, IllustrationRow] = { + Stream.emits { + unsaved.map { row => + map += (row.illustrationid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, IllustrationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.illustrationid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRow.scala index 2587464740..90975d9052 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/illustration/IllustrationRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoXml import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object IllustrationRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[IllustrationRow] = new Write[IllustrationRow]( + puts = List((IllustrationId.put, Nullability.NoNulls), + (TypoXml.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.illustrationid, x.diagram, x.modifieddate), + unsafeSet = (rs, i, a) => { + IllustrationId.put.unsafeSetNonNullable(rs, i + 0, a.illustrationid) + TypoXml.put.unsafeSetNullable(rs, i + 1, a.diagram) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + IllustrationId.put.unsafeUpdateNonNullable(ps, i + 0, a.illustrationid) + TypoXml.put.unsafeUpdateNullable(ps, i + 1, a.diagram) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 
2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala index e86826bc17..13ed69247c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala @@ -30,4 +30,7 @@ trait LocationRepo { def update: UpdateBuilder[LocationFields, LocationRow] def update(row: LocationRow): ConnectionIO[Boolean] def upsert(unsaved: LocationRow): ConnectionIO[LocationRow] + def upsertBatch(unsaved: List[LocationRow]): Stream[ConnectionIO, LocationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, LocationRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala index c1d8f5d8f8..e48b20a1fb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala @@ -10,6 +10,7 @@ package location import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,35 @@ class LocationRepoImpl extends LocationRepo { returning "locationid", "name", "costrate", "availability", "modifieddate"::text """.query(using LocationRow.read).unique } + override def upsertBatch(unsaved: List[LocationRow]): Stream[ConnectionIO, LocationRow] = { + Update[LocationRow]( + s"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + values (?::int4,?::varchar,?::numeric,?::numeric,?::timestamp) + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + returning "locationid", "name", "costrate", "availability", "modifieddate"::text""" + )(using LocationRow.write) + .updateManyWithGeneratedKeys[LocationRow]("locationid", "name", "costrate", "availability", "modifieddate")(unsaved)(using catsStdInstancesForList, LocationRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
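after the COPY, the data is merged into production.location with insert ... on conflict ("locationid") do update, so existing rows are overwritten and new rows are inserted by one statement: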
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, LocationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table location_TEMP (like production.location) on commit drop".update.run + _ <- new FragmentOps(sql"""copy location_TEMP("locationid", "name", "costrate", "availability", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using LocationRow.text) + res <- sql"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + select * from location_TEMP + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table location_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala index 7337b0d6e1..7042d1e665 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala @@ -105,4 +105,23 @@ class LocationRepoMock(toRow: Function1[LocationRowUnsaved, LocationRow], unsaved } } + override def upsertBatch(unsaved: List[LocationRow]): Stream[ConnectionIO, LocationRow] = { + Stream.emits { + unsaved.map { row => + map += (row.locationid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, LocationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.locationid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRow.scala index efc3273e1f..63fc662d84 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/location/LocationRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -73,4 +74,26 @@ object LocationRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[LocationRow] = new Write[LocationRow]( + puts = List((LocationId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.locationid, x.name, x.costrate, x.availability, x.modifieddate), + unsafeSet = (rs, i, a) => { + LocationId.put.unsafeSetNonNullable(rs, i + 0, a.locationid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.costrate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.availability) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, 
a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + LocationId.put.unsafeUpdateNonNullable(ps, i + 0, a.locationid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.costrate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.availability) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala index af68a87206..e97d9992a4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala @@ -30,4 +30,7 @@ trait ProductRepo { def update: UpdateBuilder[ProductFields, ProductRow] def update(row: ProductRow): ConnectionIO[Boolean] def upsert(unsaved: ProductRow): ConnectionIO[ProductRow] + def upsertBatch(unsaved: List[ProductRow]): Stream[ConnectionIO, ProductRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala index 7e0ad5f73b..594c578215 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala @@ -16,6 +16,7 @@ import adventureworks.production.productsubcategory.ProductsubcategoryId import adventureworks.production.unitmeasure.UnitmeasureId import adventureworks.public.Flag import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -23,6 +24,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -221,4 +223,75 @@ class ProductRepoImpl extends ProductRepo { returning "productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate"::text, "sellenddate"::text, "discontinueddate"::text, "rowguid", "modifieddate"::text """.query(using ProductRow.read).unique } + override def upsertBatch(unsaved: List[ProductRow]): Stream[ConnectionIO, ProductRow] = { + Update[ProductRow]( + s"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", 
"modifieddate") + values (?::int4,?::varchar,?,?::bool,?::bool,?,?::int2,?::int2,?::numeric,?::numeric,?,?::bpchar,?::bpchar,?::numeric,?::int4,?::bpchar,?::bpchar,?::bpchar,?::int4,?::int4,?::timestamp,?::timestamp,?::timestamp,?::uuid,?::timestamp) + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate"::text, "sellenddate"::text, "discontinueddate"::text, "rowguid", "modifieddate"::text""" + )(using ProductRow.write) + .updateManyWithGeneratedKeys[ProductRow]("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table product_TEMP (like production.product) on commit drop".update.run + _ <- new FragmentOps(sql"""copy product_TEMP("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductRow.text) + res <- sql"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") + select * from product_TEMP + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table product_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala index e60b5715b9..92c26703b1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala @@ -105,4 +105,23 @@ class ProductRepoMock(toRow: Function1[ProductRowUnsaved, ProductRow], unsaved } } + override def upsertBatch(unsaved: List[ProductRow]): Stream[ConnectionIO, ProductRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRow.scala index 7ab660d0ec..bda24fd71a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/product/ProductRow.scala @@ -19,6 +19,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.DecodingFailure @@ -278,4 +279,86 @@ object ProductRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductRow] = new Write[ProductRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (UnitmeasureId.put, Nullability.Nullable), + (UnitmeasureId.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (ProductsubcategoryId.put, Nullability.Nullable), + (ProductmodelId.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.name, x.productnumber, x.makeflag, x.finishedgoodsflag, x.color, x.safetystocklevel, x.reorderpoint, x.standardcost, x.listprice, x.size, x.sizeunitmeasurecode, x.weightunitmeasurecode, x.weight, x.daystomanufacture, x.productline, x.`class`, x.style, x.productsubcategoryid, x.productmodelid, x.sellstartdate, x.sellenddate, x.discontinueddate, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.productnumber) + Flag.put.unsafeSetNonNullable(rs, i + 3, a.makeflag) + Flag.put.unsafeSetNonNullable(rs, i + 4, a.finishedgoodsflag) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 5, a.color) + TypoShort.put.unsafeSetNonNullable(rs, i + 6, a.safetystocklevel) + TypoShort.put.unsafeSetNonNullable(rs, i + 7, a.reorderpoint) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 8, a.standardcost) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 9, a.listprice) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 10, a.size) + UnitmeasureId.put.unsafeSetNullable(rs, i + 11, a.sizeunitmeasurecode) + 
UnitmeasureId.put.unsafeSetNullable(rs, i + 12, a.weightunitmeasurecode) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 13, a.weight) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 14, a.daystomanufacture) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 15, a.productline) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 16, a.`class`) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 17, a.style) + ProductsubcategoryId.put.unsafeSetNullable(rs, i + 18, a.productsubcategoryid) + ProductmodelId.put.unsafeSetNullable(rs, i + 19, a.productmodelid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 20, a.sellstartdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 21, a.sellenddate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 22, a.discontinueddate) + TypoUUID.put.unsafeSetNonNullable(rs, i + 23, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 24, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.productnumber) + Flag.put.unsafeUpdateNonNullable(ps, i + 3, a.makeflag) + Flag.put.unsafeUpdateNonNullable(ps, i + 4, a.finishedgoodsflag) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 5, a.color) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 6, a.safetystocklevel) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 7, a.reorderpoint) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 8, a.standardcost) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 9, a.listprice) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 10, a.size) + UnitmeasureId.put.unsafeUpdateNullable(ps, i + 11, a.sizeunitmeasurecode) + UnitmeasureId.put.unsafeUpdateNullable(ps, i + 12, a.weightunitmeasurecode) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 13, a.weight) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 14, a.daystomanufacture) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 15, a.productline) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 16, a.`class`) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 17, a.style) + ProductsubcategoryId.put.unsafeUpdateNullable(ps, i + 18, a.productsubcategoryid) + ProductmodelId.put.unsafeUpdateNullable(ps, i + 19, a.productmodelid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 20, a.sellstartdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 21, a.sellenddate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 22, a.discontinueddate) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 23, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 24, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala index 87559d3054..8410b4a8a7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala @@ -30,4 +30,7 @@ trait ProductcategoryRepo { def update: UpdateBuilder[ProductcategoryFields, ProductcategoryRow] def update(row: ProductcategoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductcategoryRow): ConnectionIO[ProductcategoryRow] + def upsertBatch(unsaved: List[ProductcategoryRow]): Stream[ConnectionIO, 
ProductcategoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala index 39545534f1..17564c3fa4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -125,4 +127,33 @@ class ProductcategoryRepoImpl extends ProductcategoryRepo { returning "productcategoryid", "name", "rowguid", "modifieddate"::text """.query(using ProductcategoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductcategoryRow]): Stream[ConnectionIO, ProductcategoryRow] = { + Update[ProductcategoryRow]( + s"""insert into production.productcategory("productcategoryid", "name", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::uuid,?::timestamp) + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productcategoryid", "name", "rowguid", "modifieddate"::text""" + )(using ProductcategoryRow.write) + .updateManyWithGeneratedKeys[ProductcategoryRow]("productcategoryid", "name", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductcategoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productcategory_TEMP (like production.productcategory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productcategory_TEMP("productcategoryid", "name", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductcategoryRow.text) + res <- sql"""insert into production.productcategory("productcategoryid", "name", "rowguid", "modifieddate") + select * from productcategory_TEMP + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcategory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala index ff0c576400..22d1d9d1fd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductcategoryRepoMock(toRow: Function1[ProductcategoryRowUnsaved, Produc unsaved } } + override def upsertBatch(unsaved: List[ProductcategoryRow]): Stream[ConnectionIO, ProductcategoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productcategoryid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productcategoryid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRow.scala index 1b79d0993f..1d76e7087d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -63,4 +64,23 @@ object ProductcategoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductcategoryRow] = new Write[ProductcategoryRow]( + puts = List((ProductcategoryId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productcategoryid, x.name, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductcategoryId.put.unsafeSetNonNullable(rs, i + 0, a.productcategoryid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoUUID.put.unsafeSetNonNullable(rs, i + 2, a.rowguid) + 
TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductcategoryId.put.unsafeUpdateNonNullable(ps, i + 0, a.productcategoryid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala index 151ea57fdd..e3330ab805 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala @@ -30,4 +30,7 @@ trait ProductcosthistoryRepo { def update: UpdateBuilder[ProductcosthistoryFields, ProductcosthistoryRow] def update(row: ProductcosthistoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductcosthistoryRow): ConnectionIO[ProductcosthistoryRow] + def upsertBatch(unsaved: List[ProductcosthistoryRow]): Stream[ConnectionIO, ProductcosthistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcosthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala index 1f4673f642..4b07a0ebda 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala @@ -10,6 +10,7 @@ package productcosthistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -135,4 +137,33 @@ class ProductcosthistoryRepoImpl extends ProductcosthistoryRepo { returning "productid", "startdate"::text, "enddate"::text, "standardcost", "modifieddate"::text """.query(using ProductcosthistoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductcosthistoryRow]): Stream[ConnectionIO, ProductcosthistoryRow] = { + Update[ProductcosthistoryRow]( + s"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + values (?::int4,?::timestamp,?::timestamp,?::numeric,?::timestamp) + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "startdate"::text, "enddate"::text, "standardcost", 
"modifieddate"::text""" + )(using ProductcosthistoryRow.write) + .updateManyWithGeneratedKeys[ProductcosthistoryRow]("productid", "startdate", "enddate", "standardcost", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductcosthistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcosthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productcosthistory_TEMP (like production.productcosthistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productcosthistory_TEMP("productid", "startdate", "enddate", "standardcost", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductcosthistoryRow.text) + res <- sql"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + select * from productcosthistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcosthistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala index c92ee66eb4..8cc64e2b1a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductcosthistoryRepoMock(toRow: Function1[ProductcosthistoryRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[ProductcosthistoryRow]): Stream[ConnectionIO, ProductcosthistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductcosthistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRow.scala index 92bfc4ff4d..77159deb07 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRow.scala @@ -13,6 +13,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -75,4 +76,26 @@ object ProductcosthistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductcosthistoryRow] = new Write[ProductcosthistoryRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.startdate, x.enddate, x.standardcost, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 2, a.enddate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.standardcost) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 2, a.enddate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.standardcost) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala index 9447f002c1..8c552ec58b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala @@ -30,4 +30,7 @@ trait ProductdescriptionRepo { def update: UpdateBuilder[ProductdescriptionFields, ProductdescriptionRow] def update(row: ProductdescriptionRow): ConnectionIO[Boolean] def upsert(unsaved: ProductdescriptionRow): ConnectionIO[ProductdescriptionRow] + def upsertBatch(unsaved: List[ProductdescriptionRow]): Stream[ConnectionIO, ProductdescriptionRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
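execute the returned ConnectionIO inside a single transaction, e.g. via Transactor#trans or the .transact syntax, so that the temporary table used internally survives until the final statement: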
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdescriptionRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala index f5e5f931cf..a7bf4a8cfc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala @@ -10,6 +10,7 @@ package productdescription import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -125,4 +127,33 @@ class ProductdescriptionRepoImpl extends ProductdescriptionRepo { returning "productdescriptionid", "description", "rowguid", "modifieddate"::text """.query(using ProductdescriptionRow.read).unique } + override def upsertBatch(unsaved: List[ProductdescriptionRow]): Stream[ConnectionIO, ProductdescriptionRow] = { + Update[ProductdescriptionRow]( + s"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + values (?::int4,?,?::uuid,?::timestamp) + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productdescriptionid", "description", "rowguid", "modifieddate"::text""" + )(using ProductdescriptionRow.write) + .updateManyWithGeneratedKeys[ProductdescriptionRow]("productdescriptionid", "description", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductdescriptionRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdescriptionRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productdescription_TEMP (like production.productdescription) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productdescription_TEMP("productdescriptionid", "description", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductdescriptionRow.text) + res <- sql"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + select * from productdescription_TEMP + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdescription_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala index c91b85a7e3..48b6ffbc4e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala @@ -105,4 +105,23 @@ class ProductdescriptionRepoMock(toRow: Function1[ProductdescriptionRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[ProductdescriptionRow]): Stream[ConnectionIO, ProductdescriptionRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productdescriptionid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdescriptionRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productdescriptionid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRow.scala index d6592e1811..7d9da3f265 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -63,4 +64,23 @@ object ProductdescriptionRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductdescriptionRow] = new Write[ProductdescriptionRow]( + puts = List((ProductdescriptionId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productdescriptionid, x.description, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductdescriptionId.put.unsafeSetNonNullable(rs, i + 0, a.productdescriptionid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.description) + TypoUUID.put.unsafeSetNonNullable(rs, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductdescriptionId.put.unsafeUpdateNonNullable(ps, i + 0, a.productdescriptionid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.description) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala index 415d1b3438..b5f6b036d0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala @@ -30,4 +30,7 @@ trait ProductdocumentRepo { def update: UpdateBuilder[ProductdocumentFields, ProductdocumentRow] def update(row: ProductdocumentRow): ConnectionIO[Boolean] def upsert(unsaved: ProductdocumentRow): ConnectionIO[ProductdocumentRow] + def upsertBatch(unsaved: List[ProductdocumentRow]): Stream[ConnectionIO, ProductdocumentRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdocumentRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala index df2cda2f90..a0d4feeaa8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.document.DocumentId import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -130,4 +132,29 @@ class ProductdocumentRepoImpl extends ProductdocumentRepo { returning "productid", "modifieddate"::text, "documentnode" """.query(using ProductdocumentRow.read).unique } + override def upsertBatch(unsaved: List[ProductdocumentRow]): Stream[ConnectionIO, ProductdocumentRow] = { + Update[ProductdocumentRow]( + s"""insert into production.productdocument("productid", "modifieddate", "documentnode") + values (?::int4,?::timestamp,?) + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "modifieddate"::text, "documentnode"""" + )(using ProductdocumentRow.write) + .updateManyWithGeneratedKeys[ProductdocumentRow]("productid", "modifieddate", "documentnode")(unsaved)(using catsStdInstancesForList, ProductdocumentRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdocumentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productdocument_TEMP (like production.productdocument) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productdocument_TEMP("productid", "modifieddate", "documentnode") from stdin""").copyIn(unsaved, batchSize)(using ProductdocumentRow.text) + res <- sql"""insert into production.productdocument("productid", "modifieddate", "documentnode") + select * from productdocument_TEMP + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdocument_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala index 9d8fbc0213..05006ff3fb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala @@ -105,4 +105,23 @@ class ProductdocumentRepoMock(toRow: Function1[ProductdocumentRowUnsaved, Produc unsaved } } + override def upsertBatch(unsaved: List[ProductdocumentRow]): Stream[ConnectionIO, ProductdocumentRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductdocumentRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRow.scala index b81bec8c94..2f32ee36d5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRow.scala @@ -14,6 +14,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -62,4 +63,20 @@ object ProductdocumentRow { sb.append(Text.DELIMETER) DocumentId.text.unsafeEncode(row.documentnode, sb) } + implicit lazy val write: Write[ProductdocumentRow] = new Write[ProductdocumentRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (DocumentId.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.modifieddate, x.documentnode), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.modifieddate) + DocumentId.put.unsafeSetNonNullable(rs, i + 2, a.documentnode) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, 
a.modifieddate) + DocumentId.put.unsafeUpdateNonNullable(ps, i + 2, a.documentnode) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala index 6500d9122b..204cd2db99 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala @@ -30,4 +30,7 @@ trait ProductinventoryRepo { def update: UpdateBuilder[ProductinventoryFields, ProductinventoryRow] def update(row: ProductinventoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductinventoryRow): ConnectionIO[ProductinventoryRow] + def upsertBatch(unsaved: List[ProductinventoryRow]): Stream[ConnectionIO, ProductinventoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductinventoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala index 72c2db629e..1dd6a9c282 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.production.location.LocationId import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -20,6 +21,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -152,4 +154,37 @@ class ProductinventoryRepoImpl extends ProductinventoryRepo { returning "productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate"::text """.query(using ProductinventoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductinventoryRow]): Stream[ConnectionIO, ProductinventoryRow] = { + Update[ProductinventoryRow]( + s"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + values (?::int4,?::int2,?,?::int2,?::int2,?::uuid,?::timestamp) + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate"::text""" + )(using ProductinventoryRow.write) + .updateManyWithGeneratedKeys[ProductinventoryRow]("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductinventoryRow.read) + } + /* NOTE: this 
functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductinventoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productinventory_TEMP (like production.productinventory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productinventory_TEMP("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductinventoryRow.text) + res <- sql"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + select * from productinventory_TEMP + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productinventory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala index 9c06fc15d6..5ba22cadae 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductinventoryRepoMock(toRow: Function1[ProductinventoryRowUnsaved, Prod unsaved } } + override def upsertBatch(unsaved: List[ProductinventoryRow]): Stream[ConnectionIO, ProductinventoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductinventoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRow.scala index 193540dcfc..10487a5ef7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRow.scala @@ -16,6 +16,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -90,4 +91,32 @@ object ProductinventoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductinventoryRow] = new Write[ProductinventoryRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (LocationId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.locationid, x.shelf, x.bin, x.quantity, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + LocationId.put.unsafeSetNonNullable(rs, i + 1, a.locationid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.shelf) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.bin) + TypoShort.put.unsafeSetNonNullable(rs, i + 4, a.quantity) + TypoUUID.put.unsafeSetNonNullable(rs, i + 5, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + LocationId.put.unsafeUpdateNonNullable(ps, i + 1, a.locationid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.shelf) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.bin) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 4, a.quantity) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 5, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala index c0ae6041ae..ff5fa80fc6 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala @@ -30,4 +30,7 @@ trait ProductlistpricehistoryRepo { def update: UpdateBuilder[ProductlistpricehistoryFields, ProductlistpricehistoryRow] def update(row: ProductlistpricehistoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductlistpricehistoryRow): ConnectionIO[ProductlistpricehistoryRow] + def 
upsertBatch(unsaved: List[ProductlistpricehistoryRow]): Stream[ConnectionIO, ProductlistpricehistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductlistpricehistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala index 8db559386e..e493e1683d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala @@ -10,6 +10,7 @@ package productlistpricehistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -135,4 +137,33 @@ class ProductlistpricehistoryRepoImpl extends ProductlistpricehistoryRepo { returning "productid", "startdate"::text, "enddate"::text, "listprice", "modifieddate"::text """.query(using ProductlistpricehistoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductlistpricehistoryRow]): Stream[ConnectionIO, ProductlistpricehistoryRow] = { + Update[ProductlistpricehistoryRow]( + s"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + values (?::int4,?::timestamp,?::timestamp,?::numeric,?::timestamp) + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "startdate"::text, "enddate"::text, "listprice", "modifieddate"::text""" + )(using ProductlistpricehistoryRow.write) + .updateManyWithGeneratedKeys[ProductlistpricehistoryRow]("productid", "startdate", "enddate", "listprice", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductlistpricehistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductlistpricehistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productlistpricehistory_TEMP (like production.productlistpricehistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productlistpricehistory_TEMP("productid", "startdate", "enddate", "listprice", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductlistpricehistoryRow.text) + res <- sql"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + select * from productlistpricehistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productlistpricehistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala index d56124d38d..b82e790762 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductlistpricehistoryRepoMock(toRow: Function1[ProductlistpricehistoryRo unsaved } } + override def upsertBatch(unsaved: List[ProductlistpricehistoryRow]): Stream[ConnectionIO, ProductlistpricehistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductlistpricehistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRow.scala index e8061f6d46..55ad132aca 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRow.scala @@ -13,6 +13,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -75,4 +76,26 @@ object ProductlistpricehistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductlistpricehistoryRow] = new Write[ProductlistpricehistoryRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.startdate, x.enddate, x.listprice, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 2, a.enddate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.listprice) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 2, a.enddate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.listprice) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala index 5b87a5c973..f5f9262ca5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala @@ -30,4 +30,7 @@ trait ProductmodelRepo { def update: UpdateBuilder[ProductmodelFields, ProductmodelRow] def update(row: ProductmodelRow): ConnectionIO[Boolean] def upsert(unsaved: ProductmodelRow): ConnectionIO[ProductmodelRow] + def upsertBatch(unsaved: List[ProductmodelRow]): Stream[ConnectionIO, ProductmodelRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala index ffce7940fa..f5588b958e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.customtypes.TypoXml import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -134,4 +136,37 @@ class ProductmodelRepoImpl extends ProductmodelRepo { returning "productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate"::text """.query(using ProductmodelRow.read).unique } + override def upsertBatch(unsaved: List[ProductmodelRow]): Stream[ConnectionIO, ProductmodelRow] = { + Update[ProductmodelRow]( + s"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::xml,?::xml,?::uuid,?::timestamp) + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate"::text""" + )(using ProductmodelRow.write) + .updateManyWithGeneratedKeys[ProductmodelRow]("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductmodelRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productmodel_TEMP (like production.productmodel) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productmodel_TEMP("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductmodelRow.text) + res <- sql"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + select * from productmodel_TEMP + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodel_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala index f27941dc84..ceba04f8fd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala @@ -105,4 +105,23 @@ class ProductmodelRepoMock(toRow: Function1[ProductmodelRowUnsaved, Productmodel unsaved } } + override def upsertBatch(unsaved: List[ProductmodelRow]): Stream[ConnectionIO, ProductmodelRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productmodelid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productmodelid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRow.scala index 6efd3c5128..fcf9fc93e9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -76,4 +77,29 @@ object ProductmodelRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductmodelRow] = new Write[ProductmodelRow]( + puts = List((ProductmodelId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoXml.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productmodelid, x.name, x.catalogdescription, x.instructions, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductmodelId.put.unsafeSetNonNullable(rs, i + 0, a.productmodelid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoXml.put.unsafeSetNullable(rs, i + 2, a.catalogdescription) + TypoXml.put.unsafeSetNullable(rs, i + 3, a.instructions) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductmodelId.put.unsafeUpdateNonNullable(ps, i + 0, a.productmodelid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoXml.put.unsafeUpdateNullable(ps, i + 2, a.catalogdescription) + TypoXml.put.unsafeUpdateNullable(ps, i + 3, a.instructions) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala index eacb7fec25..0be204b673 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala @@ -30,4 +30,7 @@ trait ProductmodelillustrationRepo { def update: UpdateBuilder[ProductmodelillustrationFields, ProductmodelillustrationRow] def update(row: ProductmodelillustrationRow): ConnectionIO[Boolean] def upsert(unsaved: ProductmodelillustrationRow): ConnectionIO[ProductmodelillustrationRow] + def upsertBatch(unsaved: List[ProductmodelillustrationRow]): Stream[ConnectionIO, ProductmodelillustrationRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelillustrationRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala index e70731460a..2a5343b67c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.illustration.IllustrationId import adventureworks.production.productmodel.ProductmodelId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -127,4 +129,29 @@ class ProductmodelillustrationRepoImpl extends ProductmodelillustrationRepo { returning "productmodelid", "illustrationid", "modifieddate"::text """.query(using ProductmodelillustrationRow.read).unique } + override def upsertBatch(unsaved: List[ProductmodelillustrationRow]): Stream[ConnectionIO, ProductmodelillustrationRow] = { + Update[ProductmodelillustrationRow]( + s"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + values (?::int4,?::int4,?::timestamp) + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "illustrationid", "modifieddate"::text""" + )(using ProductmodelillustrationRow.write) + .updateManyWithGeneratedKeys[ProductmodelillustrationRow]("productmodelid", "illustrationid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductmodelillustrationRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelillustrationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productmodelillustration_TEMP (like production.productmodelillustration) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productmodelillustration_TEMP("productmodelid", "illustrationid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductmodelillustrationRow.text) + res <- sql"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + select * from productmodelillustration_TEMP + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelillustration_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala index 4454e0bfd2..baafb37cc5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala @@ -105,4 +105,23 @@ class ProductmodelillustrationRepoMock(toRow: Function1[Productmodelillustration unsaved } } + override def upsertBatch(unsaved: List[ProductmodelillustrationRow]): Stream[ConnectionIO, ProductmodelillustrationRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelillustrationRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRow.scala index 882805de7a..01b9fc10f0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRow.scala @@ -14,6 +14,7 @@ import adventureworks.production.productmodel.ProductmodelId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -61,4 +62,20 @@ object ProductmodelillustrationRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductmodelillustrationRow] = new Write[ProductmodelillustrationRow]( + puts = List((ProductmodelId.put, Nullability.NoNulls), + (IllustrationId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productmodelid, x.illustrationid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductmodelId.put.unsafeSetNonNullable(rs, i + 0, a.productmodelid) + IllustrationId.put.unsafeSetNonNullable(rs, i + 1, a.illustrationid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductmodelId.put.unsafeUpdateNonNullable(ps, i + 0, a.productmodelid) + IllustrationId.put.unsafeUpdateNonNullable(ps, i + 1, a.illustrationid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala index 49c91cc708..2cb61f47fe 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala @@ -30,4 +30,7 @@ trait ProductmodelproductdescriptioncultureRepo { def update: UpdateBuilder[ProductmodelproductdescriptioncultureFields, ProductmodelproductdescriptioncultureRow] def update(row: ProductmodelproductdescriptioncultureRow): ConnectionIO[Boolean] def upsert(unsaved: ProductmodelproductdescriptioncultureRow): ConnectionIO[ProductmodelproductdescriptioncultureRow] + def upsertBatch(unsaved: List[ProductmodelproductdescriptioncultureRow]): Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala index 943dbfa152..0a44aaed42 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.culture.CultureId import adventureworks.production.productdescription.ProductdescriptionId import adventureworks.production.productmodel.ProductmodelId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,29 @@ class ProductmodelproductdescriptioncultureRepoImpl extends Productmodelproductd returning "productmodelid", "productdescriptionid", "cultureid", "modifieddate"::text """.query(using ProductmodelproductdescriptioncultureRow.read).unique } + override def upsertBatch(unsaved: List[ProductmodelproductdescriptioncultureRow]): Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow] = { + Update[ProductmodelproductdescriptioncultureRow]( + s"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + values (?::int4,?::int4,?::bpchar,?::timestamp) + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "productmodelid", "productdescriptionid", "cultureid", "modifieddate"::text""" + )(using ProductmodelproductdescriptioncultureRow.write) + .updateManyWithGeneratedKeys[ProductmodelproductdescriptioncultureRow]("productmodelid", "productdescriptionid", "cultureid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductmodelproductdescriptioncultureRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productmodelproductdescriptionculture_TEMP (like production.productmodelproductdescriptionculture) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productmodelproductdescriptionculture_TEMP("productmodelid", "productdescriptionid", "cultureid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductmodelproductdescriptioncultureRow.text) + res <- sql"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + select * from productmodelproductdescriptionculture_TEMP + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelproductdescriptionculture_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala index 6774728e3e..81b9629538 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala @@ -105,4 +105,23 @@ class ProductmodelproductdescriptioncultureRepoMock(toRow: Function1[Productmode unsaved } } + override def upsertBatch(unsaved: List[ProductmodelproductdescriptioncultureRow]): Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRow.scala index 61ae5c5b9a..a8b9b86797 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.productmodel.ProductmodelId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -69,4 +70,23 @@ object ProductmodelproductdescriptioncultureRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductmodelproductdescriptioncultureRow] = new Write[ProductmodelproductdescriptioncultureRow]( + puts = List((ProductmodelId.put, Nullability.NoNulls), + (ProductdescriptionId.put, Nullability.NoNulls), + (CultureId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productmodelid, x.productdescriptionid, x.cultureid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductmodelId.put.unsafeSetNonNullable(rs, i + 0, a.productmodelid) + ProductdescriptionId.put.unsafeSetNonNullable(rs, i + 1, a.productdescriptionid) + CultureId.put.unsafeSetNonNullable(rs, i + 2, a.cultureid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductmodelId.put.unsafeUpdateNonNullable(ps, i + 0, a.productmodelid) + ProductdescriptionId.put.unsafeUpdateNonNullable(ps, i + 1, a.productdescriptionid) + CultureId.put.unsafeUpdateNonNullable(ps, i + 2, a.cultureid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala index eb2dd313d7..fc4939e053 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala @@ -30,4 +30,7 @@ trait ProductphotoRepo { def update: UpdateBuilder[ProductphotoFields, ProductphotoRow] def update(row: ProductphotoRow): ConnectionIO[Boolean] def upsert(unsaved: ProductphotoRow): ConnectionIO[ProductphotoRow] + def upsertBatch(unsaved: List[ProductphotoRow]): Stream[ConnectionIO, ProductphotoRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala index fb10e15b04..5e8a6fa6e1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala @@ -10,6 +10,7 @@ package productphoto import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoBytea import adventureworks.customtypes.TypoLocalDateTime +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -130,4 +132,37 @@ class ProductphotoRepoImpl extends ProductphotoRepo { returning "productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate"::text """.query(using ProductphotoRow.read).unique } + override def upsertBatch(unsaved: List[ProductphotoRow]): Stream[ConnectionIO, ProductphotoRow] = { + Update[ProductphotoRow]( + s"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + values (?::int4,?::bytea,?,?::bytea,?,?::timestamp) + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + returning "productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate"::text""" + )(using ProductphotoRow.write) + .updateManyWithGeneratedKeys[ProductphotoRow]("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductphotoRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productphoto_TEMP (like production.productphoto) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productphoto_TEMP("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductphotoRow.text) + res <- sql"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + select * from productphoto_TEMP + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productphoto_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala index 5c1f278516..e055c79211 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala @@ -105,4 +105,23 @@ class ProductphotoRepoMock(toRow: Function1[ProductphotoRowUnsaved, Productphoto unsaved } } + override def upsertBatch(unsaved: List[ProductphotoRow]): Stream[ConnectionIO, ProductphotoRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productphotoid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productphotoid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRow.scala index f15fce4818..5b00c05042 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoLocalDateTime import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -75,4 +76,29 @@ object ProductphotoRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductphotoRow] = new Write[ProductphotoRow]( + puts = List((ProductphotoId.put, Nullability.NoNulls), + (TypoBytea.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoBytea.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productphotoid, x.thumbnailphoto, x.thumbnailphotofilename, x.largephoto, x.largephotofilename, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductphotoId.put.unsafeSetNonNullable(rs, i + 0, a.productphotoid) + TypoBytea.put.unsafeSetNullable(rs, i + 1, a.thumbnailphoto) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.thumbnailphotofilename) + TypoBytea.put.unsafeSetNullable(rs, i + 3, a.largephoto) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.largephotofilename) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductphotoId.put.unsafeUpdateNonNullable(ps, i + 0, a.productphotoid) + TypoBytea.put.unsafeUpdateNullable(ps, i + 1, a.thumbnailphoto) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.thumbnailphotofilename) + TypoBytea.put.unsafeUpdateNullable(ps, i + 3, a.largephoto) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.largephotofilename) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala index b57a106ec4..8df69d7102 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala @@ -30,4 +30,7 @@ trait ProductproductphotoRepo { def update: UpdateBuilder[ProductproductphotoFields, ProductproductphotoRow] def update(row: ProductproductphotoRow): ConnectionIO[Boolean] def upsert(unsaved: ProductproductphotoRow): ConnectionIO[ProductproductphotoRow] + def upsertBatch(unsaved: List[ProductproductphotoRow]): Stream[ConnectionIO, ProductproductphotoRow] + /* NOTE: this functionality is not 
safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductproductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala index 57a57afaf1..bd1e2db8f3 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId import adventureworks.production.productphoto.ProductphotoId import adventureworks.public.Flag +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -135,4 +137,31 @@ class ProductproductphotoRepoImpl extends ProductproductphotoRepo { returning "productid", "productphotoid", "primary", "modifieddate"::text """.query(using ProductproductphotoRow.read).unique } + override def upsertBatch(unsaved: List[ProductproductphotoRow]): Stream[ConnectionIO, ProductproductphotoRow] = { + Update[ProductproductphotoRow]( + s"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + values (?::int4,?::int4,?::bool,?::timestamp) + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "productphotoid", "primary", "modifieddate"::text""" + )(using ProductproductphotoRow.write) + .updateManyWithGeneratedKeys[ProductproductphotoRow]("productid", "productphotoid", "primary", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductproductphotoRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductproductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productproductphoto_TEMP (like production.productproductphoto) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productproductphoto_TEMP("productid", "productphotoid", "primary", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductproductphotoRow.text) + res <- sql"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + select * from productproductphoto_TEMP + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productproductphoto_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala index d16a4b9bb5..f383f4b2c6 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala @@ -105,4 +105,23 @@ class ProductproductphotoRepoMock(toRow: Function1[ProductproductphotoRowUnsaved unsaved } } + override def upsertBatch(unsaved: List[ProductproductphotoRow]): Stream[ConnectionIO, ProductproductphotoRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductproductphotoRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRow.scala index 682bb79a80..9d03a28f28 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Flag import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -69,4 +70,23 @@ object ProductproductphotoRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductproductphotoRow] = new Write[ProductproductphotoRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (ProductphotoId.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.productphotoid, x.primary, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + ProductphotoId.put.unsafeSetNonNullable(rs, i + 1, a.productphotoid) + Flag.put.unsafeSetNonNullable(rs, i + 2, a.primary) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + ProductphotoId.put.unsafeUpdateNonNullable(ps, i + 1, a.productphotoid) + Flag.put.unsafeUpdateNonNullable(ps, i + 2, a.primary) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala index 2fc839da12..6053523842 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala @@ -30,4 +30,7 @@ trait ProductreviewRepo { def update: UpdateBuilder[ProductreviewFields, ProductreviewRow] def update(row: ProductreviewRow): ConnectionIO[Boolean] def upsert(unsaved: ProductreviewRow): ConnectionIO[ProductreviewRow] + def upsertBatch(unsaved: List[ProductreviewRow]): Stream[ConnectionIO, ProductreviewRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductreviewRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala index 9bb72aa514..e773add324 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -142,4 +144,41 @@ class ProductreviewRepoImpl extends ProductreviewRepo { returning "productreviewid", "productid", "reviewername", "reviewdate"::text, "emailaddress", "rating", "comments", "modifieddate"::text """.query(using ProductreviewRow.read).unique } + override def upsertBatch(unsaved: List[ProductreviewRow]): Stream[ConnectionIO, ProductreviewRow] = { + Update[ProductreviewRow]( + s"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + values (?::int4,?::int4,?::varchar,?::timestamp,?,?::int4,?,?::timestamp) + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + returning "productreviewid", "productid", "reviewername", "reviewdate"::text, "emailaddress", "rating", "comments", "modifieddate"::text""" + )(using ProductreviewRow.write) + .updateManyWithGeneratedKeys[ProductreviewRow]("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductreviewRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductreviewRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productreview_TEMP (like production.productreview) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productreview_TEMP("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductreviewRow.text) + res <- sql"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + select * from productreview_TEMP + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productreview_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala index 46a33caf89..54780e129f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala @@ -105,4 +105,23 @@ class ProductreviewRepoMock(toRow: Function1[ProductreviewRowUnsaved, Productrev unsaved } } + override def upsertBatch(unsaved: List[ProductreviewRow]): Stream[ConnectionIO, ProductreviewRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productreviewid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductreviewRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productreviewid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRow.scala index 990385733c..0fb4a1a398 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -91,4 +92,35 @@ object ProductreviewRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductreviewRow] = new Write[ProductreviewRow]( + puts = List((ProductreviewId.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productreviewid, x.productid, x.reviewername, x.reviewdate, x.emailaddress, x.rating, x.comments, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductreviewId.put.unsafeSetNonNullable(rs, i + 0, a.productreviewid) + ProductId.put.unsafeSetNonNullable(rs, i + 1, a.productid) + Name.put.unsafeSetNonNullable(rs, i + 2, a.reviewername) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.reviewdate) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 4, a.emailaddress) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 5, a.rating) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 6, a.comments) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 7, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductreviewId.put.unsafeUpdateNonNullable(ps, i + 0, a.productreviewid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + Name.put.unsafeUpdateNonNullable(ps, i + 2, a.reviewername) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.reviewdate) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.emailaddress) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.rating) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 6, a.comments) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 7, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala index c5381c27ab..4227314ffd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala @@ -30,4 +30,7 @@ trait ProductsubcategoryRepo { def update: UpdateBuilder[ProductsubcategoryFields, 
ProductsubcategoryRow] def update(row: ProductsubcategoryRow): ConnectionIO[Boolean] def upsert(unsaved: ProductsubcategoryRow): ConnectionIO[ProductsubcategoryRow] + def upsertBatch(unsaved: List[ProductsubcategoryRow]): Stream[ConnectionIO, ProductsubcategoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductsubcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala index 895d5a4b96..d54dc753a4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.production.productcategory.ProductcategoryId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -130,4 +132,35 @@ class ProductsubcategoryRepoImpl extends ProductsubcategoryRepo { returning "productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate"::text """.query(using ProductsubcategoryRow.read).unique } + override def upsertBatch(unsaved: List[ProductsubcategoryRow]): Stream[ConnectionIO, ProductsubcategoryRow] = { + Update[ProductsubcategoryRow]( + s"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + values (?::int4,?::int4,?::varchar,?::uuid,?::timestamp) + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate"::text""" + )(using ProductsubcategoryRow.write) + .updateManyWithGeneratedKeys[ProductsubcategoryRow]("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductsubcategoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductsubcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productsubcategory_TEMP (like production.productsubcategory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productsubcategory_TEMP("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductsubcategoryRow.text) + res <- sql"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + select * from productsubcategory_TEMP + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productsubcategory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala index d3bf0c6055..2718ff9f09 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala @@ -105,4 +105,23 @@ class ProductsubcategoryRepoMock(toRow: Function1[ProductsubcategoryRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[ProductsubcategoryRow]): Stream[ConnectionIO, ProductsubcategoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.productsubcategoryid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductsubcategoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.productsubcategoryid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRow.scala index 73c98d42ff..ad09819527 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -71,4 +72,26 @@ object ProductsubcategoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductsubcategoryRow] = new Write[ProductsubcategoryRow]( + puts = List((ProductsubcategoryId.put, Nullability.NoNulls), + (ProductcategoryId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productsubcategoryid, x.productcategoryid, x.name, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductsubcategoryId.put.unsafeSetNonNullable(rs, i + 0, a.productsubcategoryid) + ProductcategoryId.put.unsafeSetNonNullable(rs, i + 1, a.productcategoryid) + Name.put.unsafeSetNonNullable(rs, i + 2, a.name) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductsubcategoryId.put.unsafeUpdateNonNullable(ps, i + 0, a.productsubcategoryid) + ProductcategoryId.put.unsafeUpdateNonNullable(ps, i + 1, a.productcategoryid) + Name.put.unsafeUpdateNonNullable(ps, i + 2, a.name) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala index 63e63855f0..b9589f1f7f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala @@ -30,4 +30,7 @@ trait ScrapreasonRepo { def update: UpdateBuilder[ScrapreasonFields, ScrapreasonRow] def update(row: ScrapreasonRow): ConnectionIO[Boolean] def upsert(unsaved: ScrapreasonRow): ConnectionIO[ScrapreasonRow] + def upsertBatch(unsaved: List[ScrapreasonRow]): Stream[ConnectionIO, ScrapreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ScrapreasonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala index 08dd8d1f57..23015215e6 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala @@ -10,12 +10,14 @@ package scrapreason import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -117,4 +119,31 @@ class ScrapreasonRepoImpl extends ScrapreasonRepo { returning "scrapreasonid", "name", "modifieddate"::text """.query(using ScrapreasonRow.read).unique } + override def upsertBatch(unsaved: List[ScrapreasonRow]): Stream[ConnectionIO, ScrapreasonRow] = { + Update[ScrapreasonRow]( + s"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + values (?::int4,?::varchar,?::timestamp) + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "scrapreasonid", "name", "modifieddate"::text""" + )(using ScrapreasonRow.write) + .updateManyWithGeneratedKeys[ScrapreasonRow]("scrapreasonid", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, ScrapreasonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ScrapreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table scrapreason_TEMP (like production.scrapreason) on commit drop".update.run + _ <- new FragmentOps(sql"""copy scrapreason_TEMP("scrapreasonid", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ScrapreasonRow.text) + res <- sql"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + select * from scrapreason_TEMP + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table scrapreason_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala index 327f41aae4..3576e6fe0a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala @@ -105,4 +105,23 @@ class ScrapreasonRepoMock(toRow: Function1[ScrapreasonRowUnsaved, ScrapreasonRow unsaved } } + override def upsertBatch(unsaved: List[ScrapreasonRow]): Stream[ConnectionIO, ScrapreasonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.scrapreasonid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ScrapreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.scrapreasonid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRow.scala index bed5313007..f8af862fc9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -56,4 +57,20 @@ object ScrapreasonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ScrapreasonRow] = new Write[ScrapreasonRow]( + puts = List((ScrapreasonId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.scrapreasonid, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + ScrapreasonId.put.unsafeSetNonNullable(rs, i + 0, a.scrapreasonid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ScrapreasonId.put.unsafeUpdateNonNullable(ps, i + 0, a.scrapreasonid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala index 1a2dd68217..22c9f5fd7a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala @@ -30,4 +30,7 @@ trait TransactionhistoryRepo { def update: UpdateBuilder[TransactionhistoryFields, TransactionhistoryRow] def update(row: TransactionhistoryRow): ConnectionIO[Boolean] def upsert(unsaved: TransactionhistoryRow): ConnectionIO[TransactionhistoryRow] + def upsertBatch(unsaved: List[TransactionhistoryRow]): Stream[ConnectionIO, TransactionhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala index 83252db021..ac1291791e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala @@ -10,6 +10,7 @@ package transactionhistory import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -148,4 +150,43 @@ class TransactionhistoryRepoImpl extends TransactionhistoryRepo { returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text """.query(using TransactionhistoryRow.read).unique } + override def upsertBatch(unsaved: List[TransactionhistoryRow]): Stream[ConnectionIO, TransactionhistoryRow] = { + Update[TransactionhistoryRow]( + s"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + values (?::int4,?::int4,?::int4,?::int4,?::timestamp,?::bpchar,?::int4,?::numeric,?::timestamp) + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "transactionid", "productid", 
"referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text""" + )(using TransactionhistoryRow.write) + .updateManyWithGeneratedKeys[TransactionhistoryRow]("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate")(unsaved)(using catsStdInstancesForList, TransactionhistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table transactionhistory_TEMP (like production.transactionhistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy transactionhistory_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using TransactionhistoryRow.text) + res <- sql"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistory_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala index ed72c28afa..7e426383e1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala @@ -105,4 +105,23 @@ class TransactionhistoryRepoMock(toRow: Function1[TransactionhistoryRowUnsaved, unsaved } } + override def upsertBatch(unsaved: List[TransactionhistoryRow]): Stream[ConnectionIO, TransactionhistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.transactionid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.transactionid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRow.scala index c33f249610..6fab4ee6bc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRow.scala @@ -13,6 +13,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -97,4 +98,38 @@ object TransactionhistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[TransactionhistoryRow] = new Write[TransactionhistoryRow]( + puts = List((TransactionhistoryId.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.transactionid, x.productid, x.referenceorderid, x.referenceorderlineid, x.transactiondate, x.transactiontype, x.quantity, x.actualcost, x.modifieddate), + unsafeSet = (rs, i, a) => { + TransactionhistoryId.put.unsafeSetNonNullable(rs, i + 0, a.transactionid) + ProductId.put.unsafeSetNonNullable(rs, i + 1, a.productid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.referenceorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 3, a.referenceorderlineid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.transactiondate) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.transactiontype) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 6, a.quantity) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.actualcost) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + TransactionhistoryId.put.unsafeUpdateNonNullable(ps, i + 0, a.transactionid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.referenceorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.referenceorderlineid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.transactiondate) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.transactiontype) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.quantity) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.actualcost) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala index 13eac4daa4..fdb69f9ef6 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala @@ -30,4 +30,7 @@ trait TransactionhistoryarchiveRepo { def update: UpdateBuilder[TransactionhistoryarchiveFields, TransactionhistoryarchiveRow] def update(row: TransactionhistoryarchiveRow): ConnectionIO[Boolean] def upsert(unsaved: TransactionhistoryarchiveRow): ConnectionIO[TransactionhistoryarchiveRow] + def upsertBatch(unsaved: List[TransactionhistoryarchiveRow]): Stream[ConnectionIO, TransactionhistoryarchiveRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryarchiveRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala index c79c17c4f2..5b67db2161 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala @@ -9,6 +9,7 @@ package transactionhistoryarchive import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -16,6 +17,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -144,4 +146,43 @@ class TransactionhistoryarchiveRepoImpl extends TransactionhistoryarchiveRepo { returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text """.query(using TransactionhistoryarchiveRow.read).unique } + override def upsertBatch(unsaved: List[TransactionhistoryarchiveRow]): Stream[ConnectionIO, TransactionhistoryarchiveRow] = { + Update[TransactionhistoryarchiveRow]( + s"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + values (?::int4,?::int4,?::int4,?::int4,?::timestamp,?::bpchar,?::int4,?::numeric,?::timestamp) + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning 
"transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text""" + )(using TransactionhistoryarchiveRow.write) + .updateManyWithGeneratedKeys[TransactionhistoryarchiveRow]("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate")(unsaved)(using catsStdInstancesForList, TransactionhistoryarchiveRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryarchiveRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table transactionhistoryarchive_TEMP (like production.transactionhistoryarchive) on commit drop".update.run + _ <- new FragmentOps(sql"""copy transactionhistoryarchive_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using TransactionhistoryarchiveRow.text) + res <- sql"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistoryarchive_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistoryarchive_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala index 3114db3487..fda35a872b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala @@ -105,4 +105,23 @@ class TransactionhistoryarchiveRepoMock(toRow: Function1[Transactionhistoryarchi unsaved } } + override def upsertBatch(unsaved: List[TransactionhistoryarchiveRow]): Stream[ConnectionIO, TransactionhistoryarchiveRow] = { + Stream.emits { + unsaved.map { row => + map += (row.transactionid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, TransactionhistoryarchiveRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.transactionid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRow.scala index a468b213a5..8c7c8a74cf 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRow.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -94,4 +95,38 @@ object TransactionhistoryarchiveRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[TransactionhistoryarchiveRow] = new Write[TransactionhistoryarchiveRow]( + puts = List((TransactionhistoryarchiveId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.transactionid, x.productid, x.referenceorderid, x.referenceorderlineid, x.transactiondate, x.transactiontype, x.quantity, x.actualcost, x.modifieddate), + unsafeSet = (rs, i, a) => { + TransactionhistoryarchiveId.put.unsafeSetNonNullable(rs, i + 0, a.transactionid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.productid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.referenceorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 3, a.referenceorderlineid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.transactiondate) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 5, a.transactiontype) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 6, a.quantity) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.actualcost) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + TransactionhistoryarchiveId.put.unsafeUpdateNonNullable(ps, i + 0, a.transactionid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.referenceorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.referenceorderlineid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.transactiondate) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.transactiontype) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.quantity) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.actualcost) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala index fc98c1f282..01c49bbe2d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala @@ -30,4 +30,7 @@ trait UnitmeasureRepo { def update: UpdateBuilder[UnitmeasureFields, UnitmeasureRow] def update(row: UnitmeasureRow): ConnectionIO[Boolean] def upsert(unsaved: UnitmeasureRow): ConnectionIO[UnitmeasureRow] + def upsertBatch(unsaved: List[UnitmeasureRow]): Stream[ConnectionIO, UnitmeasureRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, UnitmeasureRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala index 4ac62d7c78..92f8198c23 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala @@ -10,12 +10,14 @@ package unitmeasure import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -114,4 +116,31 @@ class UnitmeasureRepoImpl extends UnitmeasureRepo { returning "unitmeasurecode", "name", "modifieddate"::text """.query(using UnitmeasureRow.read).unique } + override def upsertBatch(unsaved: List[UnitmeasureRow]): Stream[ConnectionIO, UnitmeasureRow] = { + Update[UnitmeasureRow]( + s"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + values (?::bpchar,?::varchar,?::timestamp) + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "unitmeasurecode", "name", "modifieddate"::text""" + )(using UnitmeasureRow.write) + .updateManyWithGeneratedKeys[UnitmeasureRow]("unitmeasurecode", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, UnitmeasureRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, UnitmeasureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table unitmeasure_TEMP (like production.unitmeasure) on commit drop".update.run + _ <- new FragmentOps(sql"""copy unitmeasure_TEMP("unitmeasurecode", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using UnitmeasureRow.text) + res <- sql"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + select * from unitmeasure_TEMP + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table unitmeasure_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala index 9d6ff9e629..6bbf563842 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala @@ -105,4 +105,23 @@ class UnitmeasureRepoMock(toRow: Function1[UnitmeasureRowUnsaved, UnitmeasureRow unsaved } } + override def upsertBatch(unsaved: List[UnitmeasureRow]): Stream[ConnectionIO, UnitmeasureRow] = { + Stream.emits { + unsaved.map { row => + map += (row.unitmeasurecode -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, UnitmeasureRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.unitmeasurecode -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRow.scala index 334faa1a85..34a631e618 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -55,4 +56,20 @@ object UnitmeasureRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[UnitmeasureRow] = new Write[UnitmeasureRow]( + puts = List((UnitmeasureId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.unitmeasurecode, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + UnitmeasureId.put.unsafeSetNonNullable(rs, i + 0, a.unitmeasurecode) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + UnitmeasureId.put.unsafeUpdateNonNullable(ps, i + 0, a.unitmeasurecode) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala index 1a92eb4ba9..ac6b353b05 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala @@ -30,4 +30,7 @@ trait WorkorderRepo { def update: UpdateBuilder[WorkorderFields, WorkorderRow] def update(row: WorkorderRow): ConnectionIO[Boolean] def upsert(unsaved: WorkorderRow): ConnectionIO[WorkorderRow] + def upsertBatch(unsaved: List[WorkorderRow]): Stream[ConnectionIO, WorkorderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala index 81dd795484..0dc8211fc0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.product.ProductId import adventureworks.production.scrapreason.ScrapreasonId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -144,4 +146,43 @@ class WorkorderRepoImpl extends WorkorderRepo { returning "workorderid", "productid", "orderqty", "scrappedqty", "startdate"::text, "enddate"::text, "duedate"::text, "scrapreasonid", "modifieddate"::text """.query(using WorkorderRow.read).unique } + override def upsertBatch(unsaved: List[WorkorderRow]): Stream[ConnectionIO, WorkorderRow] = { + Update[WorkorderRow]( + s"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + values (?::int4,?::int4,?::int4,?::int2,?::timestamp,?::timestamp,?::timestamp,?::int2,?::timestamp) + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + returning "workorderid", "productid", "orderqty", "scrappedqty", "startdate"::text, "enddate"::text, "duedate"::text, "scrapreasonid", "modifieddate"::text""" + )(using WorkorderRow.write) + .updateManyWithGeneratedKeys[WorkorderRow]("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate")(unsaved)(using catsStdInstancesForList, 
WorkorderRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table workorder_TEMP (like production.workorder) on commit drop".update.run + _ <- new FragmentOps(sql"""copy workorder_TEMP("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using WorkorderRow.text) + res <- sql"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + select * from workorder_TEMP + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorder_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala index 98318108e5..1f085b9786 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala @@ -105,4 +105,23 @@ class WorkorderRepoMock(toRow: Function1[WorkorderRowUnsaved, WorkorderRow], unsaved } } + override def upsertBatch(unsaved: List[WorkorderRow]): Stream[ConnectionIO, WorkorderRow] = { + Stream.emits { + unsaved.map { row => + map += (row.workorderid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.workorderid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRow.scala index 8507457113..f57d5b8d94 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorder/WorkorderRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.scrapreason.ScrapreasonId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -101,4 +102,38 @@ object WorkorderRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[WorkorderRow] = new Write[WorkorderRow]( + puts = List((WorkorderId.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls), + (ScrapreasonId.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.workorderid, x.productid, x.orderqty, x.scrappedqty, x.startdate, x.enddate, x.duedate, x.scrapreasonid, x.modifieddate), + unsafeSet = (rs, i, a) => { + WorkorderId.put.unsafeSetNonNullable(rs, i + 0, a.workorderid) + ProductId.put.unsafeSetNonNullable(rs, i + 1, a.productid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.orderqty) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.scrappedqty) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 5, a.enddate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.duedate) + ScrapreasonId.put.unsafeSetNullable(rs, i + 7, a.scrapreasonid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + WorkorderId.put.unsafeUpdateNonNullable(ps, i + 0, a.workorderid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.orderqty) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.scrappedqty) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 5, a.enddate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.duedate) + ScrapreasonId.put.unsafeUpdateNullable(ps, i + 7, a.scrapreasonid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala index 57e3d06fbf..8d736904cd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala +++ 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala @@ -30,4 +30,7 @@ trait WorkorderroutingRepo { def update: UpdateBuilder[WorkorderroutingFields, WorkorderroutingRow] def update(row: WorkorderroutingRow): ConnectionIO[Boolean] def upsert(unsaved: WorkorderroutingRow): ConnectionIO[WorkorderroutingRow] + def upsertBatch(unsaved: List[WorkorderroutingRow]): Stream[ConnectionIO, WorkorderroutingRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderroutingRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala index 938e836f6d..42abe87fd3 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.production.location.LocationId import adventureworks.production.workorder.WorkorderId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -165,4 +167,45 @@ class WorkorderroutingRepoImpl extends WorkorderroutingRepo { returning "workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate"::text, "scheduledenddate"::text, "actualstartdate"::text, "actualenddate"::text, "actualresourcehrs", "plannedcost", "actualcost", "modifieddate"::text """.query(using WorkorderroutingRow.read).unique } + override def upsertBatch(unsaved: List[WorkorderroutingRow]): Stream[ConnectionIO, WorkorderroutingRow] = { + Update[WorkorderroutingRow]( + s"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") + values (?::int4,?::int4,?::int2,?::int2,?::timestamp,?::timestamp,?::timestamp,?::timestamp,?::numeric,?::numeric,?::numeric,?::timestamp) + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + returning "workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate"::text, "scheduledenddate"::text, "actualstartdate"::text, "actualenddate"::text, "actualresourcehrs", "plannedcost", "actualcost", 
"modifieddate"::text""" + )(using WorkorderroutingRow.write) + .updateManyWithGeneratedKeys[WorkorderroutingRow]("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate")(unsaved)(using catsStdInstancesForList, WorkorderroutingRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderroutingRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table workorderrouting_TEMP (like production.workorderrouting) on commit drop".update.run + _ <- new FragmentOps(sql"""copy workorderrouting_TEMP("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using WorkorderroutingRow.text) + res <- sql"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") + select * from workorderrouting_TEMP + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorderrouting_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala index f695135c50..cadb2c7c86 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala @@ -105,4 +105,23 @@ class WorkorderroutingRepoMock(toRow: Function1[WorkorderroutingRowUnsaved, Work unsaved } } + override def upsertBatch(unsaved: List[WorkorderroutingRow]): Stream[ConnectionIO, WorkorderroutingRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, WorkorderroutingRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRow.scala index 10de940063..0e3dc46783 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.workorder.WorkorderId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -124,4 +125,47 @@ object WorkorderroutingRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[WorkorderroutingRow] = new Write[WorkorderroutingRow]( + puts = List((WorkorderId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (LocationId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.workorderid, x.productid, x.operationsequence, x.locationid, x.scheduledstartdate, x.scheduledenddate, x.actualstartdate, x.actualenddate, x.actualresourcehrs, x.plannedcost, x.actualcost, x.modifieddate), + unsafeSet = (rs, i, a) => { + WorkorderId.put.unsafeSetNonNullable(rs, i + 0, a.workorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.productid) + TypoShort.put.unsafeSetNonNullable(rs, i + 2, a.operationsequence) + LocationId.put.unsafeSetNonNullable(rs, i + 3, a.locationid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.scheduledstartdate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.scheduledenddate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 6, a.actualstartdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 7, a.actualenddate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 8, a.actualresourcehrs) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 9, a.plannedcost) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 10, a.actualcost) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 11, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + WorkorderId.put.unsafeUpdateNonNullable(ps, i + 0, a.workorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 2, a.operationsequence) + LocationId.put.unsafeUpdateNonNullable(ps, i + 3, a.locationid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.scheduledstartdate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.scheduledenddate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 6, 
a.actualstartdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 7, a.actualenddate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 8, a.actualresourcehrs) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 9, a.plannedcost) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 10, a.actualcost) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 11, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala index 2f8a4a5a82..9689ef735e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala @@ -27,4 +27,7 @@ trait FlaffRepo { def update: UpdateBuilder[FlaffFields, FlaffRow] def update(row: FlaffRow): ConnectionIO[Boolean] def upsert(unsaved: FlaffRow): ConnectionIO[FlaffRow] + def upsertBatch(unsaved: List[FlaffRow]): Stream[ConnectionIO, FlaffRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, FlaffRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala index 83b99eb18b..7e9ab7153b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala @@ -7,12 +7,14 @@ package adventureworks package public package flaff +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -101,4 +103,29 @@ class FlaffRepoImpl extends FlaffRepo { returning "code", "another_code", "some_number", "specifier", "parentspecifier" """.query(using FlaffRow.read).unique } + override def upsertBatch(unsaved: List[FlaffRow]): Stream[ConnectionIO, FlaffRow] = { + Update[FlaffRow]( + s"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + values (?::text,?,?::int4,?::text,?::text) + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + returning "code", "another_code", "some_number", "specifier", "parentspecifier"""" + )(using FlaffRow.write) + .updateManyWithGeneratedKeys[FlaffRow]("code", "another_code", "some_number", "specifier", "parentspecifier")(unsaved)(using catsStdInstancesForList, FlaffRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FlaffRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table flaff_TEMP (like public.flaff) on commit drop".update.run + _ <- new FragmentOps(sql"""copy flaff_TEMP("code", "another_code", "some_number", "specifier", "parentspecifier") from stdin""").copyIn(unsaved, batchSize)(using FlaffRow.text) + res <- sql"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + select * from flaff_TEMP + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + ; + drop table flaff_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala index 3ecac6119f..1694635db9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala @@ -89,4 +89,23 @@ class FlaffRepoMock(map: scala.collection.mutable.Map[FlaffId, FlaffRow] = scala unsaved } } + override def upsertBatch(unsaved: List[FlaffRow]): Stream[ConnectionIO, FlaffRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, FlaffRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRow.scala index 21172f45e1..dd47320914 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/flaff/FlaffRow.scala @@ -10,6 +10,7 @@ package flaff import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -64,4 +65,26 @@ object FlaffRow { sb.append(Text.DELIMETER) Text.option(ShortText.text).unsafeEncode(row.parentspecifier, sb) } + implicit lazy val write: Write[FlaffRow] = new Write[FlaffRow]( + puts = List((ShortText.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (ShortText.put, Nullability.NoNulls), + (ShortText.put, Nullability.Nullable)), + toList = x => List(x.code, x.anotherCode, x.someNumber, x.specifier, x.parentspecifier), + unsafeSet = (rs, i, a) => { + ShortText.put.unsafeSetNonNullable(rs, i + 0, a.code) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.anotherCode) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.someNumber) + ShortText.put.unsafeSetNonNullable(rs, i + 3, a.specifier) + ShortText.put.unsafeSetNullable(rs, i + 4, a.parentspecifier) + }, + unsafeUpdate = (ps, i, a) => { + ShortText.put.unsafeUpdateNonNullable(ps, i + 0, a.code) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.anotherCode) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 
2, a.someNumber) + ShortText.put.unsafeUpdateNonNullable(ps, i + 3, a.specifier) + ShortText.put.unsafeUpdateNullable(ps, i + 4, a.parentspecifier) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala index 126c7aaa51..4b07896fb4 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala @@ -30,4 +30,7 @@ trait IdentityTestRepo { def update: UpdateBuilder[IdentityTestFields, IdentityTestRow] def update(row: IdentityTestRow): ConnectionIO[Boolean] def upsert(unsaved: IdentityTestRow): ConnectionIO[IdentityTestRow] + def upsertBatch(unsaved: List[IdentityTestRow]): Stream[ConnectionIO, IdentityTestRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, IdentityTestRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala index c54ae45a2c..7b60b6694f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala @@ -8,6 +8,7 @@ package public package identity_test import adventureworks.customtypes.Defaulted +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -15,6 +16,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -112,4 +114,31 @@ class IdentityTestRepoImpl extends IdentityTestRepo { returning "always_generated", "default_generated", "name" """.query(using IdentityTestRow.read).unique } + override def upsertBatch(unsaved: List[IdentityTestRow]): Stream[ConnectionIO, IdentityTestRow] = { + Update[IdentityTestRow]( + s"""insert into public.identity-test("always_generated", "default_generated", "name") + values (?::int4,?::int4,?) + on conflict ("name") + do update set + "always_generated" = EXCLUDED."always_generated", + "default_generated" = EXCLUDED."default_generated" + returning "always_generated", "default_generated", "name"""" + )(using IdentityTestRow.write) + .updateManyWithGeneratedKeys[IdentityTestRow]("always_generated", "default_generated", "name")(unsaved)(using catsStdInstancesForList, IdentityTestRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, IdentityTestRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table identity-test_TEMP (like public.identity-test) on commit drop".update.run + _ <- new FragmentOps(sql"""copy identity-test_TEMP("always_generated", "default_generated", "name") from stdin""").copyIn(unsaved, batchSize)(using IdentityTestRow.text) + res <- sql"""insert into public.identity-test("always_generated", "default_generated", "name") + select * from identity-test_TEMP + on conflict ("name") + do update set + "always_generated" = EXCLUDED."always_generated", + "default_generated" = EXCLUDED."default_generated" + ; + drop table identity-test_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala index 3635a9fc61..ece36dd427 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala @@ -105,4 +105,23 @@ class IdentityTestRepoMock(toRow: Function1[IdentityTestRowUnsaved, IdentityTest unsaved } } + override def upsertBatch(unsaved: List[IdentityTestRow]): Stream[ConnectionIO, IdentityTestRow] = { + Stream.emits { + unsaved.map { row => + map += (row.name -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, IdentityTestRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.name -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRow.scala index f3b51cef77..1837195e58 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRow.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -52,4 +53,20 @@ object IdentityTestRow { sb.append(Text.DELIMETER) IdentityTestId.text.unsafeEncode(row.name, sb) } + implicit lazy val write: Write[IdentityTestRow] = new Write[IdentityTestRow]( + puts = List((Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (IdentityTestId.put, Nullability.NoNulls)), + toList = x => List(x.alwaysGenerated, x.defaultGenerated, x.name), + unsafeSet = (rs, i, a) => { + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 0, a.alwaysGenerated) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.defaultGenerated) + IdentityTestId.put.unsafeSetNonNullable(rs, i + 2, a.name) + }, + unsafeUpdate = (ps, i, a) => { + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.alwaysGenerated) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.defaultGenerated) + IdentityTestId.put.unsafeUpdateNonNullable(ps, i + 2, 
a.name) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtest/PgtestRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtest/PgtestRow.scala index 27c3e255d5..1a2382de9b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtest/PgtestRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtest/PgtestRow.scala @@ -34,6 +34,7 @@ import adventureworks.customtypes.TypoXml import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.DecodingFailure @@ -559,4 +560,221 @@ object PgtestRow { sb.append(Text.DELIMETER) Text.iterableInstance[Array, TypoXml](TypoXml.text, implicitly).unsafeEncode(row.xmles, sb) } + implicit lazy val write: Write[PgtestRow] = new Write[PgtestRow]( + puts = List((Meta.BooleanMeta.put, Nullability.NoNulls), + (TypoBox.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoBytea.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoCircle.put, Nullability.NoNulls), + (TypoLocalDate.put, Nullability.NoNulls), + (Meta.FloatMeta.put, Nullability.NoNulls), + (Meta.DoubleMeta.put, Nullability.NoNulls), + (TypoHStore.put, Nullability.NoNulls), + (TypoInet.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoInt2Vector.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.LongMeta.put, Nullability.NoNulls), + (TypoInterval.put, Nullability.NoNulls), + (TypoJson.put, Nullability.NoNulls), + (TypoJsonb.put, Nullability.NoNulls), + (TypoLine.put, Nullability.NoNulls), + (TypoLineSegment.put, Nullability.NoNulls), + (TypoMoney.put, Nullability.NoNulls), + (Mydomain.put, Nullability.NoNulls), + (Myenum.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoPath.put, Nullability.NoNulls), + (TypoPoint.put, Nullability.NoNulls), + (TypoPolygon.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoLocalTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoInstant.put, Nullability.NoNulls), + (TypoOffsetTime.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoVector.put, Nullability.NoNulls), + (TypoXml.put, Nullability.NoNulls), + (TypoBox.arrayPut, Nullability.NoNulls), + (adventureworks.StringArrayMeta.put, Nullability.NoNulls), + (adventureworks.StringArrayMeta.put, Nullability.NoNulls), + (TypoCircle.arrayPut, Nullability.NoNulls), + (TypoLocalDate.arrayPut, Nullability.NoNulls), + (adventureworks.FloatArrayMeta.put, Nullability.NoNulls), + (adventureworks.DoubleArrayMeta.put, Nullability.NoNulls), + (TypoInet.arrayPut, Nullability.NoNulls), + (TypoShort.arrayPut, Nullability.NoNulls), + (TypoInt2Vector.arrayPut, Nullability.NoNulls), + (adventureworks.IntegerArrayMeta.put, Nullability.NoNulls), + (adventureworks.LongArrayMeta.put, Nullability.NoNulls), + (TypoInterval.arrayPut, Nullability.NoNulls), + (TypoJson.arrayPut, Nullability.NoNulls), + (TypoJsonb.arrayPut, Nullability.NoNulls), + (TypoLine.arrayPut, Nullability.NoNulls), + (TypoLineSegment.arrayPut, Nullability.NoNulls), + (TypoMoney.arrayPut, Nullability.NoNulls), + (Mydomain.arrayPut, Nullability.NoNulls), + (Myenum.arrayPut, Nullability.NoNulls), + 
(adventureworks.StringArrayMeta.put, Nullability.NoNulls), + (adventureworks.BigDecimalMeta.put, Nullability.NoNulls), + (TypoPath.arrayPut, Nullability.NoNulls), + (TypoPoint.arrayPut, Nullability.NoNulls), + (TypoPolygon.arrayPut, Nullability.NoNulls), + (adventureworks.StringArrayMeta.put, Nullability.NoNulls), + (TypoLocalTime.arrayPut, Nullability.NoNulls), + (TypoLocalDateTime.arrayPut, Nullability.NoNulls), + (TypoInstant.arrayPut, Nullability.NoNulls), + (TypoOffsetTime.arrayPut, Nullability.NoNulls), + (TypoUUID.arrayPut, Nullability.NoNulls), + (adventureworks.StringArrayMeta.put, Nullability.NoNulls), + (TypoXml.arrayPut, Nullability.NoNulls)), + toList = x => List(x.bool, x.box, x.bpchar, x.bytea, x.char, x.circle, x.date, x.float4, x.float8, x.hstore, x.inet, x.int2, x.int2vector, x.int4, x.int8, x.interval, x.json, x.jsonb, x.line, x.lseg, x.money, x.mydomain, x.myenum, x.name, x.numeric, x.path, x.point, x.polygon, x.text, x.time, x.timestamp, x.timestampz, x.timez, x.uuid, x.varchar, x.vector, x.xml, x.boxes, x.bpchares, x.chares, x.circlees, x.datees, x.float4es, x.float8es, x.inetes, x.int2es, x.int2vectores, x.int4es, x.int8es, x.intervales, x.jsones, x.jsonbes, x.linees, x.lseges, x.moneyes, x.mydomaines, x.myenumes, x.namees, x.numerices, x.pathes, x.pointes, x.polygones, x.textes, x.timees, x.timestampes, x.timestampzes, x.timezes, x.uuides, x.varchares, x.xmles), + unsafeSet = (rs, i, a) => { + Meta.BooleanMeta.put.unsafeSetNonNullable(rs, i + 0, a.bool) + TypoBox.put.unsafeSetNonNullable(rs, i + 1, a.box) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.bpchar) + TypoBytea.put.unsafeSetNonNullable(rs, i + 3, a.bytea) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 4, a.char) + TypoCircle.put.unsafeSetNonNullable(rs, i + 5, a.circle) + TypoLocalDate.put.unsafeSetNonNullable(rs, i + 6, a.date) + Meta.FloatMeta.put.unsafeSetNonNullable(rs, i + 7, a.float4) + Meta.DoubleMeta.put.unsafeSetNonNullable(rs, i + 8, a.float8) + TypoHStore.put.unsafeSetNonNullable(rs, i + 9, a.hstore) + TypoInet.put.unsafeSetNonNullable(rs, i + 10, a.inet) + TypoShort.put.unsafeSetNonNullable(rs, i + 11, a.int2) + TypoInt2Vector.put.unsafeSetNonNullable(rs, i + 12, a.int2vector) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 13, a.int4) + Meta.LongMeta.put.unsafeSetNonNullable(rs, i + 14, a.int8) + TypoInterval.put.unsafeSetNonNullable(rs, i + 15, a.interval) + TypoJson.put.unsafeSetNonNullable(rs, i + 16, a.json) + TypoJsonb.put.unsafeSetNonNullable(rs, i + 17, a.jsonb) + TypoLine.put.unsafeSetNonNullable(rs, i + 18, a.line) + TypoLineSegment.put.unsafeSetNonNullable(rs, i + 19, a.lseg) + TypoMoney.put.unsafeSetNonNullable(rs, i + 20, a.money) + Mydomain.put.unsafeSetNonNullable(rs, i + 21, a.mydomain) + Myenum.put.unsafeSetNonNullable(rs, i + 22, a.myenum) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 23, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 24, a.numeric) + TypoPath.put.unsafeSetNonNullable(rs, i + 25, a.path) + TypoPoint.put.unsafeSetNonNullable(rs, i + 26, a.point) + TypoPolygon.put.unsafeSetNonNullable(rs, i + 27, a.polygon) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 28, a.text) + TypoLocalTime.put.unsafeSetNonNullable(rs, i + 29, a.time) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 30, a.timestamp) + TypoInstant.put.unsafeSetNonNullable(rs, i + 31, a.timestampz) + TypoOffsetTime.put.unsafeSetNonNullable(rs, i + 32, a.timez) + TypoUUID.put.unsafeSetNonNullable(rs, i + 33, a.uuid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, 
i + 34, a.varchar) + TypoVector.put.unsafeSetNonNullable(rs, i + 35, a.vector) + TypoXml.put.unsafeSetNonNullable(rs, i + 36, a.xml) + TypoBox.arrayPut.unsafeSetNonNullable(rs, i + 37, a.boxes) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 38, a.bpchares) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 39, a.chares) + TypoCircle.arrayPut.unsafeSetNonNullable(rs, i + 40, a.circlees) + TypoLocalDate.arrayPut.unsafeSetNonNullable(rs, i + 41, a.datees) + adventureworks.FloatArrayMeta.put.unsafeSetNonNullable(rs, i + 42, a.float4es) + adventureworks.DoubleArrayMeta.put.unsafeSetNonNullable(rs, i + 43, a.float8es) + TypoInet.arrayPut.unsafeSetNonNullable(rs, i + 44, a.inetes) + TypoShort.arrayPut.unsafeSetNonNullable(rs, i + 45, a.int2es) + TypoInt2Vector.arrayPut.unsafeSetNonNullable(rs, i + 46, a.int2vectores) + adventureworks.IntegerArrayMeta.put.unsafeSetNonNullable(rs, i + 47, a.int4es) + adventureworks.LongArrayMeta.put.unsafeSetNonNullable(rs, i + 48, a.int8es) + TypoInterval.arrayPut.unsafeSetNonNullable(rs, i + 49, a.intervales) + TypoJson.arrayPut.unsafeSetNonNullable(rs, i + 50, a.jsones) + TypoJsonb.arrayPut.unsafeSetNonNullable(rs, i + 51, a.jsonbes) + TypoLine.arrayPut.unsafeSetNonNullable(rs, i + 52, a.linees) + TypoLineSegment.arrayPut.unsafeSetNonNullable(rs, i + 53, a.lseges) + TypoMoney.arrayPut.unsafeSetNonNullable(rs, i + 54, a.moneyes) + Mydomain.arrayPut.unsafeSetNonNullable(rs, i + 55, a.mydomaines) + Myenum.arrayPut.unsafeSetNonNullable(rs, i + 56, a.myenumes) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 57, a.namees) + adventureworks.BigDecimalMeta.put.unsafeSetNonNullable(rs, i + 58, a.numerices) + TypoPath.arrayPut.unsafeSetNonNullable(rs, i + 59, a.pathes) + TypoPoint.arrayPut.unsafeSetNonNullable(rs, i + 60, a.pointes) + TypoPolygon.arrayPut.unsafeSetNonNullable(rs, i + 61, a.polygones) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 62, a.textes) + TypoLocalTime.arrayPut.unsafeSetNonNullable(rs, i + 63, a.timees) + TypoLocalDateTime.arrayPut.unsafeSetNonNullable(rs, i + 64, a.timestampes) + TypoInstant.arrayPut.unsafeSetNonNullable(rs, i + 65, a.timestampzes) + TypoOffsetTime.arrayPut.unsafeSetNonNullable(rs, i + 66, a.timezes) + TypoUUID.arrayPut.unsafeSetNonNullable(rs, i + 67, a.uuides) + adventureworks.StringArrayMeta.put.unsafeSetNonNullable(rs, i + 68, a.varchares) + TypoXml.arrayPut.unsafeSetNonNullable(rs, i + 69, a.xmles) + }, + unsafeUpdate = (ps, i, a) => { + Meta.BooleanMeta.put.unsafeUpdateNonNullable(ps, i + 0, a.bool) + TypoBox.put.unsafeUpdateNonNullable(ps, i + 1, a.box) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.bpchar) + TypoBytea.put.unsafeUpdateNonNullable(ps, i + 3, a.bytea) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.char) + TypoCircle.put.unsafeUpdateNonNullable(ps, i + 5, a.circle) + TypoLocalDate.put.unsafeUpdateNonNullable(ps, i + 6, a.date) + Meta.FloatMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.float4) + Meta.DoubleMeta.put.unsafeUpdateNonNullable(ps, i + 8, a.float8) + TypoHStore.put.unsafeUpdateNonNullable(ps, i + 9, a.hstore) + TypoInet.put.unsafeUpdateNonNullable(ps, i + 10, a.inet) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 11, a.int2) + TypoInt2Vector.put.unsafeUpdateNonNullable(ps, i + 12, a.int2vector) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 13, a.int4) + Meta.LongMeta.put.unsafeUpdateNonNullable(ps, i + 14, a.int8) + TypoInterval.put.unsafeUpdateNonNullable(ps, i + 15, a.interval) + 
TypoJson.put.unsafeUpdateNonNullable(ps, i + 16, a.json) + TypoJsonb.put.unsafeUpdateNonNullable(ps, i + 17, a.jsonb) + TypoLine.put.unsafeUpdateNonNullable(ps, i + 18, a.line) + TypoLineSegment.put.unsafeUpdateNonNullable(ps, i + 19, a.lseg) + TypoMoney.put.unsafeUpdateNonNullable(ps, i + 20, a.money) + Mydomain.put.unsafeUpdateNonNullable(ps, i + 21, a.mydomain) + Myenum.put.unsafeUpdateNonNullable(ps, i + 22, a.myenum) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 23, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 24, a.numeric) + TypoPath.put.unsafeUpdateNonNullable(ps, i + 25, a.path) + TypoPoint.put.unsafeUpdateNonNullable(ps, i + 26, a.point) + TypoPolygon.put.unsafeUpdateNonNullable(ps, i + 27, a.polygon) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 28, a.text) + TypoLocalTime.put.unsafeUpdateNonNullable(ps, i + 29, a.time) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 30, a.timestamp) + TypoInstant.put.unsafeUpdateNonNullable(ps, i + 31, a.timestampz) + TypoOffsetTime.put.unsafeUpdateNonNullable(ps, i + 32, a.timez) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 33, a.uuid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 34, a.varchar) + TypoVector.put.unsafeUpdateNonNullable(ps, i + 35, a.vector) + TypoXml.put.unsafeUpdateNonNullable(ps, i + 36, a.xml) + TypoBox.arrayPut.unsafeUpdateNonNullable(ps, i + 37, a.boxes) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 38, a.bpchares) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 39, a.chares) + TypoCircle.arrayPut.unsafeUpdateNonNullable(ps, i + 40, a.circlees) + TypoLocalDate.arrayPut.unsafeUpdateNonNullable(ps, i + 41, a.datees) + adventureworks.FloatArrayMeta.put.unsafeUpdateNonNullable(ps, i + 42, a.float4es) + adventureworks.DoubleArrayMeta.put.unsafeUpdateNonNullable(ps, i + 43, a.float8es) + TypoInet.arrayPut.unsafeUpdateNonNullable(ps, i + 44, a.inetes) + TypoShort.arrayPut.unsafeUpdateNonNullable(ps, i + 45, a.int2es) + TypoInt2Vector.arrayPut.unsafeUpdateNonNullable(ps, i + 46, a.int2vectores) + adventureworks.IntegerArrayMeta.put.unsafeUpdateNonNullable(ps, i + 47, a.int4es) + adventureworks.LongArrayMeta.put.unsafeUpdateNonNullable(ps, i + 48, a.int8es) + TypoInterval.arrayPut.unsafeUpdateNonNullable(ps, i + 49, a.intervales) + TypoJson.arrayPut.unsafeUpdateNonNullable(ps, i + 50, a.jsones) + TypoJsonb.arrayPut.unsafeUpdateNonNullable(ps, i + 51, a.jsonbes) + TypoLine.arrayPut.unsafeUpdateNonNullable(ps, i + 52, a.linees) + TypoLineSegment.arrayPut.unsafeUpdateNonNullable(ps, i + 53, a.lseges) + TypoMoney.arrayPut.unsafeUpdateNonNullable(ps, i + 54, a.moneyes) + Mydomain.arrayPut.unsafeUpdateNonNullable(ps, i + 55, a.mydomaines) + Myenum.arrayPut.unsafeUpdateNonNullable(ps, i + 56, a.myenumes) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 57, a.namees) + adventureworks.BigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 58, a.numerices) + TypoPath.arrayPut.unsafeUpdateNonNullable(ps, i + 59, a.pathes) + TypoPoint.arrayPut.unsafeUpdateNonNullable(ps, i + 60, a.pointes) + TypoPolygon.arrayPut.unsafeUpdateNonNullable(ps, i + 61, a.polygones) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 62, a.textes) + TypoLocalTime.arrayPut.unsafeUpdateNonNullable(ps, i + 63, a.timees) + TypoLocalDateTime.arrayPut.unsafeUpdateNonNullable(ps, i + 64, a.timestampes) + TypoInstant.arrayPut.unsafeUpdateNonNullable(ps, i + 65, a.timestampzes) + TypoOffsetTime.arrayPut.unsafeUpdateNonNullable(ps, i + 66, 
a.timezes) + TypoUUID.arrayPut.unsafeUpdateNonNullable(ps, i + 67, a.uuides) + adventureworks.StringArrayMeta.put.unsafeUpdateNonNullable(ps, i + 68, a.varchares) + TypoXml.arrayPut.unsafeUpdateNonNullable(ps, i + 69, a.xmles) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtestnull/PgtestnullRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtestnull/PgtestnullRow.scala index 890d126b30..0ca5e5d0a9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtestnull/PgtestnullRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/pgtestnull/PgtestnullRow.scala @@ -34,6 +34,7 @@ import adventureworks.customtypes.TypoXml import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.DecodingFailure @@ -559,4 +560,221 @@ object PgtestnullRow { sb.append(Text.DELIMETER) Text.option(Text.iterableInstance[Array, TypoXml](TypoXml.text, implicitly)).unsafeEncode(row.xmles, sb) } + implicit lazy val write: Write[PgtestnullRow] = new Write[PgtestnullRow]( + puts = List((Meta.BooleanMeta.put, Nullability.Nullable), + (TypoBox.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoBytea.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoCircle.put, Nullability.Nullable), + (TypoLocalDate.put, Nullability.Nullable), + (Meta.FloatMeta.put, Nullability.Nullable), + (Meta.DoubleMeta.put, Nullability.Nullable), + (TypoHStore.put, Nullability.Nullable), + (TypoInet.put, Nullability.Nullable), + (TypoShort.put, Nullability.Nullable), + (TypoInt2Vector.put, Nullability.Nullable), + (Meta.IntMeta.put, Nullability.Nullable), + (Meta.LongMeta.put, Nullability.Nullable), + (TypoInterval.put, Nullability.Nullable), + (TypoJson.put, Nullability.Nullable), + (TypoJsonb.put, Nullability.Nullable), + (TypoLine.put, Nullability.Nullable), + (TypoLineSegment.put, Nullability.Nullable), + (TypoMoney.put, Nullability.Nullable), + (Mydomain.put, Nullability.Nullable), + (Myenum.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (TypoPath.put, Nullability.Nullable), + (TypoPoint.put, Nullability.Nullable), + (TypoPolygon.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoLocalTime.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoInstant.put, Nullability.Nullable), + (TypoOffsetTime.put, Nullability.Nullable), + (TypoUUID.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoVector.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoBox.arrayPut, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (TypoCircle.arrayPut, Nullability.Nullable), + (TypoLocalDate.arrayPut, Nullability.Nullable), + (adventureworks.FloatArrayMeta.put, Nullability.Nullable), + (adventureworks.DoubleArrayMeta.put, Nullability.Nullable), + (TypoInet.arrayPut, Nullability.Nullable), + (TypoShort.arrayPut, Nullability.Nullable), + (TypoInt2Vector.arrayPut, Nullability.Nullable), + (adventureworks.IntegerArrayMeta.put, Nullability.Nullable), + (adventureworks.LongArrayMeta.put, Nullability.Nullable), + (TypoInterval.arrayPut, Nullability.Nullable), + (TypoJson.arrayPut, 
Nullability.Nullable), + (TypoJsonb.arrayPut, Nullability.Nullable), + (TypoLine.arrayPut, Nullability.Nullable), + (TypoLineSegment.arrayPut, Nullability.Nullable), + (TypoMoney.arrayPut, Nullability.Nullable), + (Mydomain.arrayPut, Nullability.Nullable), + (Myenum.arrayPut, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (adventureworks.BigDecimalMeta.put, Nullability.Nullable), + (TypoPath.arrayPut, Nullability.Nullable), + (TypoPoint.arrayPut, Nullability.Nullable), + (TypoPolygon.arrayPut, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (TypoLocalTime.arrayPut, Nullability.Nullable), + (TypoLocalDateTime.arrayPut, Nullability.Nullable), + (TypoInstant.arrayPut, Nullability.Nullable), + (TypoOffsetTime.arrayPut, Nullability.Nullable), + (TypoUUID.arrayPut, Nullability.Nullable), + (adventureworks.StringArrayMeta.put, Nullability.Nullable), + (TypoXml.arrayPut, Nullability.Nullable)), + toList = x => List(x.bool, x.box, x.bpchar, x.bytea, x.char, x.circle, x.date, x.float4, x.float8, x.hstore, x.inet, x.int2, x.int2vector, x.int4, x.int8, x.interval, x.json, x.jsonb, x.line, x.lseg, x.money, x.mydomain, x.myenum, x.name, x.numeric, x.path, x.point, x.polygon, x.text, x.time, x.timestamp, x.timestampz, x.timez, x.uuid, x.varchar, x.vector, x.xml, x.boxes, x.bpchares, x.chares, x.circlees, x.datees, x.float4es, x.float8es, x.inetes, x.int2es, x.int2vectores, x.int4es, x.int8es, x.intervales, x.jsones, x.jsonbes, x.linees, x.lseges, x.moneyes, x.mydomaines, x.myenumes, x.namees, x.numerices, x.pathes, x.pointes, x.polygones, x.textes, x.timees, x.timestampes, x.timestampzes, x.timezes, x.uuides, x.varchares, x.xmles), + unsafeSet = (rs, i, a) => { + Meta.BooleanMeta.put.unsafeSetNullable(rs, i + 0, a.bool) + TypoBox.put.unsafeSetNullable(rs, i + 1, a.box) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.bpchar) + TypoBytea.put.unsafeSetNullable(rs, i + 3, a.bytea) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 4, a.char) + TypoCircle.put.unsafeSetNullable(rs, i + 5, a.circle) + TypoLocalDate.put.unsafeSetNullable(rs, i + 6, a.date) + Meta.FloatMeta.put.unsafeSetNullable(rs, i + 7, a.float4) + Meta.DoubleMeta.put.unsafeSetNullable(rs, i + 8, a.float8) + TypoHStore.put.unsafeSetNullable(rs, i + 9, a.hstore) + TypoInet.put.unsafeSetNullable(rs, i + 10, a.inet) + TypoShort.put.unsafeSetNullable(rs, i + 11, a.int2) + TypoInt2Vector.put.unsafeSetNullable(rs, i + 12, a.int2vector) + Meta.IntMeta.put.unsafeSetNullable(rs, i + 13, a.int4) + Meta.LongMeta.put.unsafeSetNullable(rs, i + 14, a.int8) + TypoInterval.put.unsafeSetNullable(rs, i + 15, a.interval) + TypoJson.put.unsafeSetNullable(rs, i + 16, a.json) + TypoJsonb.put.unsafeSetNullable(rs, i + 17, a.jsonb) + TypoLine.put.unsafeSetNullable(rs, i + 18, a.line) + TypoLineSegment.put.unsafeSetNullable(rs, i + 19, a.lseg) + TypoMoney.put.unsafeSetNullable(rs, i + 20, a.money) + Mydomain.put.unsafeSetNullable(rs, i + 21, a.mydomain) + Myenum.put.unsafeSetNullable(rs, i + 22, a.myenum) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 23, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 24, a.numeric) + TypoPath.put.unsafeSetNullable(rs, i + 25, a.path) + TypoPoint.put.unsafeSetNullable(rs, i + 26, a.point) + TypoPolygon.put.unsafeSetNullable(rs, i + 27, a.polygon) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 28, a.text) + TypoLocalTime.put.unsafeSetNullable(rs, i + 29, a.time) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 30, a.timestamp) + 
TypoInstant.put.unsafeSetNullable(rs, i + 31, a.timestampz) + TypoOffsetTime.put.unsafeSetNullable(rs, i + 32, a.timez) + TypoUUID.put.unsafeSetNullable(rs, i + 33, a.uuid) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 34, a.varchar) + TypoVector.put.unsafeSetNullable(rs, i + 35, a.vector) + TypoXml.put.unsafeSetNullable(rs, i + 36, a.xml) + TypoBox.arrayPut.unsafeSetNullable(rs, i + 37, a.boxes) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 38, a.bpchares) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 39, a.chares) + TypoCircle.arrayPut.unsafeSetNullable(rs, i + 40, a.circlees) + TypoLocalDate.arrayPut.unsafeSetNullable(rs, i + 41, a.datees) + adventureworks.FloatArrayMeta.put.unsafeSetNullable(rs, i + 42, a.float4es) + adventureworks.DoubleArrayMeta.put.unsafeSetNullable(rs, i + 43, a.float8es) + TypoInet.arrayPut.unsafeSetNullable(rs, i + 44, a.inetes) + TypoShort.arrayPut.unsafeSetNullable(rs, i + 45, a.int2es) + TypoInt2Vector.arrayPut.unsafeSetNullable(rs, i + 46, a.int2vectores) + adventureworks.IntegerArrayMeta.put.unsafeSetNullable(rs, i + 47, a.int4es) + adventureworks.LongArrayMeta.put.unsafeSetNullable(rs, i + 48, a.int8es) + TypoInterval.arrayPut.unsafeSetNullable(rs, i + 49, a.intervales) + TypoJson.arrayPut.unsafeSetNullable(rs, i + 50, a.jsones) + TypoJsonb.arrayPut.unsafeSetNullable(rs, i + 51, a.jsonbes) + TypoLine.arrayPut.unsafeSetNullable(rs, i + 52, a.linees) + TypoLineSegment.arrayPut.unsafeSetNullable(rs, i + 53, a.lseges) + TypoMoney.arrayPut.unsafeSetNullable(rs, i + 54, a.moneyes) + Mydomain.arrayPut.unsafeSetNullable(rs, i + 55, a.mydomaines) + Myenum.arrayPut.unsafeSetNullable(rs, i + 56, a.myenumes) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 57, a.namees) + adventureworks.BigDecimalMeta.put.unsafeSetNullable(rs, i + 58, a.numerices) + TypoPath.arrayPut.unsafeSetNullable(rs, i + 59, a.pathes) + TypoPoint.arrayPut.unsafeSetNullable(rs, i + 60, a.pointes) + TypoPolygon.arrayPut.unsafeSetNullable(rs, i + 61, a.polygones) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 62, a.textes) + TypoLocalTime.arrayPut.unsafeSetNullable(rs, i + 63, a.timees) + TypoLocalDateTime.arrayPut.unsafeSetNullable(rs, i + 64, a.timestampes) + TypoInstant.arrayPut.unsafeSetNullable(rs, i + 65, a.timestampzes) + TypoOffsetTime.arrayPut.unsafeSetNullable(rs, i + 66, a.timezes) + TypoUUID.arrayPut.unsafeSetNullable(rs, i + 67, a.uuides) + adventureworks.StringArrayMeta.put.unsafeSetNullable(rs, i + 68, a.varchares) + TypoXml.arrayPut.unsafeSetNullable(rs, i + 69, a.xmles) + }, + unsafeUpdate = (ps, i, a) => { + Meta.BooleanMeta.put.unsafeUpdateNullable(ps, i + 0, a.bool) + TypoBox.put.unsafeUpdateNullable(ps, i + 1, a.box) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.bpchar) + TypoBytea.put.unsafeUpdateNullable(ps, i + 3, a.bytea) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 4, a.char) + TypoCircle.put.unsafeUpdateNullable(ps, i + 5, a.circle) + TypoLocalDate.put.unsafeUpdateNullable(ps, i + 6, a.date) + Meta.FloatMeta.put.unsafeUpdateNullable(ps, i + 7, a.float4) + Meta.DoubleMeta.put.unsafeUpdateNullable(ps, i + 8, a.float8) + TypoHStore.put.unsafeUpdateNullable(ps, i + 9, a.hstore) + TypoInet.put.unsafeUpdateNullable(ps, i + 10, a.inet) + TypoShort.put.unsafeUpdateNullable(ps, i + 11, a.int2) + TypoInt2Vector.put.unsafeUpdateNullable(ps, i + 12, a.int2vector) + Meta.IntMeta.put.unsafeUpdateNullable(ps, i + 13, a.int4) + Meta.LongMeta.put.unsafeUpdateNullable(ps, i + 14, a.int8) + 
TypoInterval.put.unsafeUpdateNullable(ps, i + 15, a.interval) + TypoJson.put.unsafeUpdateNullable(ps, i + 16, a.json) + TypoJsonb.put.unsafeUpdateNullable(ps, i + 17, a.jsonb) + TypoLine.put.unsafeUpdateNullable(ps, i + 18, a.line) + TypoLineSegment.put.unsafeUpdateNullable(ps, i + 19, a.lseg) + TypoMoney.put.unsafeUpdateNullable(ps, i + 20, a.money) + Mydomain.put.unsafeUpdateNullable(ps, i + 21, a.mydomain) + Myenum.put.unsafeUpdateNullable(ps, i + 22, a.myenum) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 23, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 24, a.numeric) + TypoPath.put.unsafeUpdateNullable(ps, i + 25, a.path) + TypoPoint.put.unsafeUpdateNullable(ps, i + 26, a.point) + TypoPolygon.put.unsafeUpdateNullable(ps, i + 27, a.polygon) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 28, a.text) + TypoLocalTime.put.unsafeUpdateNullable(ps, i + 29, a.time) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 30, a.timestamp) + TypoInstant.put.unsafeUpdateNullable(ps, i + 31, a.timestampz) + TypoOffsetTime.put.unsafeUpdateNullable(ps, i + 32, a.timez) + TypoUUID.put.unsafeUpdateNullable(ps, i + 33, a.uuid) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 34, a.varchar) + TypoVector.put.unsafeUpdateNullable(ps, i + 35, a.vector) + TypoXml.put.unsafeUpdateNullable(ps, i + 36, a.xml) + TypoBox.arrayPut.unsafeUpdateNullable(ps, i + 37, a.boxes) + adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 38, a.bpchares) + adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 39, a.chares) + TypoCircle.arrayPut.unsafeUpdateNullable(ps, i + 40, a.circlees) + TypoLocalDate.arrayPut.unsafeUpdateNullable(ps, i + 41, a.datees) + adventureworks.FloatArrayMeta.put.unsafeUpdateNullable(ps, i + 42, a.float4es) + adventureworks.DoubleArrayMeta.put.unsafeUpdateNullable(ps, i + 43, a.float8es) + TypoInet.arrayPut.unsafeUpdateNullable(ps, i + 44, a.inetes) + TypoShort.arrayPut.unsafeUpdateNullable(ps, i + 45, a.int2es) + TypoInt2Vector.arrayPut.unsafeUpdateNullable(ps, i + 46, a.int2vectores) + adventureworks.IntegerArrayMeta.put.unsafeUpdateNullable(ps, i + 47, a.int4es) + adventureworks.LongArrayMeta.put.unsafeUpdateNullable(ps, i + 48, a.int8es) + TypoInterval.arrayPut.unsafeUpdateNullable(ps, i + 49, a.intervales) + TypoJson.arrayPut.unsafeUpdateNullable(ps, i + 50, a.jsones) + TypoJsonb.arrayPut.unsafeUpdateNullable(ps, i + 51, a.jsonbes) + TypoLine.arrayPut.unsafeUpdateNullable(ps, i + 52, a.linees) + TypoLineSegment.arrayPut.unsafeUpdateNullable(ps, i + 53, a.lseges) + TypoMoney.arrayPut.unsafeUpdateNullable(ps, i + 54, a.moneyes) + Mydomain.arrayPut.unsafeUpdateNullable(ps, i + 55, a.mydomaines) + Myenum.arrayPut.unsafeUpdateNullable(ps, i + 56, a.myenumes) + adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 57, a.namees) + adventureworks.BigDecimalMeta.put.unsafeUpdateNullable(ps, i + 58, a.numerices) + TypoPath.arrayPut.unsafeUpdateNullable(ps, i + 59, a.pathes) + TypoPoint.arrayPut.unsafeUpdateNullable(ps, i + 60, a.pointes) + TypoPolygon.arrayPut.unsafeUpdateNullable(ps, i + 61, a.polygones) + adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 62, a.textes) + TypoLocalTime.arrayPut.unsafeUpdateNullable(ps, i + 63, a.timees) + TypoLocalDateTime.arrayPut.unsafeUpdateNullable(ps, i + 64, a.timestampes) + TypoInstant.arrayPut.unsafeUpdateNullable(ps, i + 65, a.timestampzes) + TypoOffsetTime.arrayPut.unsafeUpdateNullable(ps, i + 66, a.timezes) + TypoUUID.arrayPut.unsafeUpdateNullable(ps, i + 67, a.uuides) + 
adventureworks.StringArrayMeta.put.unsafeUpdateNullable(ps, i + 68, a.varchares) + TypoXml.arrayPut.unsafeUpdateNullable(ps, i + 69, a.xmles) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala index d7f095e13a..b0b13c26fa 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala @@ -32,4 +32,7 @@ trait UsersRepo { def update: UpdateBuilder[UsersFields, UsersRow] def update(row: UsersRow): ConnectionIO[Boolean] def upsert(unsaved: UsersRow): ConnectionIO[UsersRow] + def upsertBatch(unsaved: List[UsersRow]): Stream[ConnectionIO, UsersRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, UsersRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala index 7df717b98c..5f71691e7b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala @@ -10,6 +10,7 @@ package users import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoInstant import adventureworks.customtypes.TypoUnknownCitext +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -137,4 +139,39 @@ class UsersRepoImpl extends UsersRepo { returning "user_id", "name", "last_name", "email"::text, "password", "created_at"::text, "verified_on"::text """.query(using UsersRow.read).unique } + override def upsertBatch(unsaved: List[UsersRow]): Stream[ConnectionIO, UsersRow] = { + Update[UsersRow]( + s"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + values (?::uuid,?,?,?::citext,?,?::timestamptz,?::timestamptz) + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + returning "user_id", "name", "last_name", "email"::text, "password", "created_at"::text, "verified_on"::text""" + )(using UsersRow.write) + .updateManyWithGeneratedKeys[UsersRow]("user_id", "name", "last_name", "email", "password", "created_at", "verified_on")(unsaved)(using catsStdInstancesForList, UsersRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, UsersRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table users_TEMP (like public.users) on commit drop".update.run + _ <- new FragmentOps(sql"""copy users_TEMP("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") from stdin""").copyIn(unsaved, batchSize)(using UsersRow.text) + res <- sql"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + select * from users_TEMP + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + ; + drop table users_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala index 17c513c080..a75ac81070 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala @@ -109,4 +109,23 @@ class UsersRepoMock(toRow: Function1[UsersRowUnsaved, UsersRow], unsaved } } + override def upsertBatch(unsaved: List[UsersRow]): Stream[ConnectionIO, UsersRow] = { + Stream.emits { + unsaved.map { row => + map += (row.userId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, UsersRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.userId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRow.scala index 6b7c7f5e2b..cb941331c1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/public/users/UsersRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUnknownCitext import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -73,4 +74,32 @@ object UsersRow { sb.append(Text.DELIMETER) Text.option(TypoInstant.text).unsafeEncode(row.verifiedOn, sb) } + implicit lazy val write: Write[UsersRow] = new Write[UsersRow]( + puts = List((UsersId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoUnknownCitext.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoInstant.put, Nullability.NoNulls), + (TypoInstant.put, Nullability.Nullable)), + toList = x => List(x.userId, x.name, x.lastName, x.email, x.password, x.createdAt, x.verifiedOn), + unsafeSet = (rs, i, a) => { + UsersId.put.unsafeSetNonNullable(rs, i + 0, a.userId) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.name) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.lastName) + TypoUnknownCitext.put.unsafeSetNonNullable(rs, i + 3, a.email) + 
Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 4, a.password) + TypoInstant.put.unsafeSetNonNullable(rs, i + 5, a.createdAt) + TypoInstant.put.unsafeSetNullable(rs, i + 6, a.verifiedOn) + }, + unsafeUpdate = (ps, i, a) => { + UsersId.put.unsafeUpdateNonNullable(ps, i + 0, a.userId) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.lastName) + TypoUnknownCitext.put.unsafeUpdateNonNullable(ps, i + 3, a.email) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.password) + TypoInstant.put.unsafeUpdateNonNullable(ps, i + 5, a.createdAt) + TypoInstant.put.unsafeUpdateNullable(ps, i + 6, a.verifiedOn) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala index 9fe44954c7..052eaa6a2e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala @@ -30,4 +30,7 @@ trait ProductvendorRepo { def update: UpdateBuilder[ProductvendorFields, ProductvendorRow] def update(row: ProductvendorRow): ConnectionIO[Boolean] def upsert(unsaved: ProductvendorRow): ConnectionIO[ProductvendorRow] + def upsertBatch(unsaved: List[ProductvendorRow]): Stream[ConnectionIO, ProductvendorRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ProductvendorRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala index b72c441973..32b589893f 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.production.product.ProductId import adventureworks.production.unitmeasure.UnitmeasureId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -161,4 +163,45 @@ class ProductvendorRepoImpl extends ProductvendorRepo { returning "productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate"::text, "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate"::text """.query(using ProductvendorRow.read).unique } + override def upsertBatch(unsaved: List[ProductvendorRow]): Stream[ConnectionIO, ProductvendorRow] = { + Update[ProductvendorRow]( + s"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", 
"lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + values (?::int4,?::int4,?::int4,?::numeric,?::numeric,?::timestamp,?::int4,?::int4,?::int4,?::bpchar,?::timestamp) + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + returning "productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate"::text, "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate"::text""" + )(using ProductvendorRow.write) + .updateManyWithGeneratedKeys[ProductvendorRow]("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate")(unsaved)(using catsStdInstancesForList, ProductvendorRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductvendorRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table productvendor_TEMP (like purchasing.productvendor) on commit drop".update.run + _ <- new FragmentOps(sql"""copy productvendor_TEMP("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ProductvendorRow.text) + res <- sql"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + select * from productvendor_TEMP + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productvendor_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala index 1a0511bf90..87768946ab 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala @@ -105,4 +105,23 @@ class ProductvendorRepoMock(toRow: Function1[ProductvendorRowUnsaved, Productven unsaved } } + override def upsertBatch(unsaved: List[ProductvendorRow]): Stream[ConnectionIO, ProductvendorRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this 
functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ProductvendorRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRow.scala index 7e7919972f..70e91b70c5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRow.scala @@ -15,6 +15,7 @@ import adventureworks.production.unitmeasure.UnitmeasureId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -118,4 +119,44 @@ object ProductvendorRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ProductvendorRow] = new Write[ProductvendorRow]( + puts = List((ProductId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.Nullable), + (UnitmeasureId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.productid, x.businessentityid, x.averageleadtime, x.standardprice, x.lastreceiptcost, x.lastreceiptdate, x.minorderqty, x.maxorderqty, x.onorderqty, x.unitmeasurecode, x.modifieddate), + unsafeSet = (rs, i, a) => { + ProductId.put.unsafeSetNonNullable(rs, i + 0, a.productid) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 1, a.businessentityid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.averageleadtime) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.standardprice) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 4, a.lastreceiptcost) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 5, a.lastreceiptdate) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 6, a.minorderqty) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 7, a.maxorderqty) + Meta.IntMeta.put.unsafeSetNullable(rs, i + 8, a.onorderqty) + UnitmeasureId.put.unsafeSetNonNullable(rs, i + 9, a.unitmeasurecode) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 10, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ProductId.put.unsafeUpdateNonNullable(ps, i + 0, a.productid) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 1, a.businessentityid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.averageleadtime) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.standardprice) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 4, a.lastreceiptcost) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 5, a.lastreceiptdate) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.minorderqty) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.maxorderqty) + Meta.IntMeta.put.unsafeUpdateNullable(ps, i + 8, 
a.onorderqty) + UnitmeasureId.put.unsafeUpdateNonNullable(ps, i + 9, a.unitmeasurecode) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 10, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderdetail/PurchaseorderdetailRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderdetail/PurchaseorderdetailRow.scala index a97e0fa6d8..403259997b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderdetail/PurchaseorderdetailRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderdetail/PurchaseorderdetailRow.scala @@ -15,6 +15,7 @@ import adventureworks.purchasing.purchaseorderheader.PurchaseorderheaderId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -104,4 +105,38 @@ object PurchaseorderdetailRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PurchaseorderdetailRow] = new Write[PurchaseorderdetailRow]( + puts = List((PurchaseorderheaderId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.purchaseorderid, x.purchaseorderdetailid, x.duedate, x.orderqty, x.productid, x.unitprice, x.receivedqty, x.rejectedqty, x.modifieddate), + unsafeSet = (rs, i, a) => { + PurchaseorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.purchaseorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.purchaseorderdetailid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.duedate) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.orderqty) + ProductId.put.unsafeSetNonNullable(rs, i + 4, a.productid) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 5, a.unitprice) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 6, a.receivedqty) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.rejectedqty) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + PurchaseorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.purchaseorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.purchaseorderdetailid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.duedate) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.orderqty) + ProductId.put.unsafeUpdateNonNullable(ps, i + 4, a.productid) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.unitprice) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.receivedqty) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.rejectedqty) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala index 9d98ca58bd..96272c6d1e 100644 --- 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala @@ -30,4 +30,7 @@ trait PurchaseorderheaderRepo { def update: UpdateBuilder[PurchaseorderheaderFields, PurchaseorderheaderRow] def update(row: PurchaseorderheaderRow): ConnectionIO[Boolean] def upsert(unsaved: PurchaseorderheaderRow): ConnectionIO[PurchaseorderheaderRow] + def upsertBatch(unsaved: List[PurchaseorderheaderRow]): Stream[ConnectionIO, PurchaseorderheaderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PurchaseorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala index 2231f92f45..abe180c108 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.person.businessentity.BusinessentityId import adventureworks.purchasing.shipmethod.ShipmethodId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -174,4 +176,49 @@ class PurchaseorderheaderRepoImpl extends PurchaseorderheaderRepo { returning "purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate"::text, "shipdate"::text, "subtotal", "taxamt", "freight", "modifieddate"::text """.query(using PurchaseorderheaderRow.read).unique } + override def upsertBatch(unsaved: List[PurchaseorderheaderRow]): Stream[ConnectionIO, PurchaseorderheaderRow] = { + Update[PurchaseorderheaderRow]( + s"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + values (?::int4,?::int2,?::int2,?::int4,?::int4,?::int4,?::timestamp,?::timestamp,?::numeric,?::numeric,?::numeric,?::timestamp) + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + returning "purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate"::text, "shipdate"::text, "subtotal", 
"taxamt", "freight", "modifieddate"::text""" + )(using PurchaseorderheaderRow.write) + .updateManyWithGeneratedKeys[PurchaseorderheaderRow]("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate")(unsaved)(using catsStdInstancesForList, PurchaseorderheaderRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PurchaseorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table purchaseorderheader_TEMP (like purchasing.purchaseorderheader) on commit drop".update.run + _ <- new FragmentOps(sql"""copy purchaseorderheader_TEMP("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PurchaseorderheaderRow.text) + res <- sql"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + select * from purchaseorderheader_TEMP + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table purchaseorderheader_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala index 554a88c920..ac27d9dbdb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala @@ -105,4 +105,23 @@ class PurchaseorderheaderRepoMock(toRow: Function1[PurchaseorderheaderRowUnsaved unsaved } } + override def upsertBatch(unsaved: List[PurchaseorderheaderRow]): Stream[ConnectionIO, PurchaseorderheaderRow] = { + Stream.emits { + unsaved.map { row => + map += (row.purchaseorderid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PurchaseorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.purchaseorderid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRow.scala index df28d84b8a..7a1f8db943 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRow.scala @@ -15,6 +15,7 @@ import adventureworks.purchasing.shipmethod.ShipmethodId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -128,4 +129,47 @@ object PurchaseorderheaderRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PurchaseorderheaderRow] = new Write[PurchaseorderheaderRow]( + puts = List((PurchaseorderheaderId.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.NoNulls), + (ShipmethodId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.purchaseorderid, x.revisionnumber, x.status, x.employeeid, x.vendorid, x.shipmethodid, x.orderdate, x.shipdate, x.subtotal, x.taxamt, x.freight, x.modifieddate), + unsafeSet = (rs, i, a) => { + PurchaseorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.purchaseorderid) + TypoShort.put.unsafeSetNonNullable(rs, i + 1, a.revisionnumber) + TypoShort.put.unsafeSetNonNullable(rs, i + 2, a.status) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 3, a.employeeid) + BusinessentityId.put.unsafeSetNonNullable(rs, i + 4, a.vendorid) + ShipmethodId.put.unsafeSetNonNullable(rs, i + 5, a.shipmethodid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.orderdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 7, a.shipdate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 8, a.subtotal) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 9, a.taxamt) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 10, a.freight) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 11, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + PurchaseorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.purchaseorderid) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 1, a.revisionnumber) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 2, a.status) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 3, a.employeeid) + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 4, a.vendorid) + ShipmethodId.put.unsafeUpdateNonNullable(ps, i + 5, a.shipmethodid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.orderdate) + 
TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 7, a.shipdate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 8, a.subtotal) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 9, a.taxamt) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 10, a.freight) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 11, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala index 1e0e982125..6e7d70bd49 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala @@ -30,4 +30,7 @@ trait ShipmethodRepo { def update: UpdateBuilder[ShipmethodFields, ShipmethodRow] def update(row: ShipmethodRow): ConnectionIO[Boolean] def upsert(unsaved: ShipmethodRow): ConnectionIO[ShipmethodRow] + def upsertBatch(unsaved: List[ShipmethodRow]): Stream[ConnectionIO, ShipmethodRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ShipmethodRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala index 65c02c86cb..b2291c283e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -140,4 +142,37 @@ class ShipmethodRepoImpl extends ShipmethodRepo { returning "shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate"::text """.query(using ShipmethodRow.read).unique } + override def upsertBatch(unsaved: List[ShipmethodRow]): Stream[ConnectionIO, ShipmethodRow] = { + Update[ShipmethodRow]( + s"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::numeric,?::numeric,?::uuid,?::timestamp) + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate"::text""" + )(using ShipmethodRow.write) + .updateManyWithGeneratedKeys[ShipmethodRow]("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, 
ShipmethodRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShipmethodRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table shipmethod_TEMP (like purchasing.shipmethod) on commit drop".update.run + _ <- new FragmentOps(sql"""copy shipmethod_TEMP("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ShipmethodRow.text) + res <- sql"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + select * from shipmethod_TEMP + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shipmethod_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala index a9772608db..207da1e7ec 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala @@ -105,4 +105,23 @@ class ShipmethodRepoMock(toRow: Function1[ShipmethodRowUnsaved, ShipmethodRow], unsaved } } + override def upsertBatch(unsaved: List[ShipmethodRow]): Stream[ConnectionIO, ShipmethodRow] = { + Stream.emits { + unsaved.map { row => + map += (row.shipmethodid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShipmethodRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.shipmethodid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRow.scala index df146d7da6..a2ce260cd5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRow.scala @@ -14,6 +14,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -80,4 +81,29 @@ object ShipmethodRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ShipmethodRow] = new Write[ShipmethodRow]( + puts = List((ShipmethodId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.shipmethodid, x.name, x.shipbase, x.shiprate, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + ShipmethodId.put.unsafeSetNonNullable(rs, i + 0, a.shipmethodid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.shipbase) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.shiprate) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ShipmethodId.put.unsafeUpdateNonNullable(ps, i + 0, a.shipmethodid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.shipbase) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.shiprate) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala index 00bd84cf6f..fa90607130 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala @@ -31,4 +31,7 @@ trait VendorRepo { def update: UpdateBuilder[VendorFields, VendorRow] def update(row: VendorRow): ConnectionIO[Boolean] def upsert(unsaved: VendorRow): ConnectionIO[VendorRow] + def upsertBatch(unsaved: List[VendorRow]): Stream[ConnectionIO, VendorRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, VendorRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala index 2b2288dc69..546dd6368e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.AccountNumber import adventureworks.public.Flag import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -145,4 +147,41 @@ class VendorRepoImpl extends VendorRepo { returning "businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate"::text """.query(using VendorRow.read).unique } + override def upsertBatch(unsaved: List[VendorRow]): Stream[ConnectionIO, VendorRow] = { + Update[VendorRow]( + s"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + values (?::int4,?::varchar,?::varchar,?::int2,?::bool,?::bool,?,?::timestamp) + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate"::text""" + )(using VendorRow.write) + .updateManyWithGeneratedKeys[VendorRow]("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate")(unsaved)(using catsStdInstancesForList, VendorRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, VendorRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table vendor_TEMP (like purchasing.vendor) on commit drop".update.run + _ <- new FragmentOps(sql"""copy vendor_TEMP("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using VendorRow.text) + res <- sql"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + select * from vendor_TEMP + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table vendor_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala index 2f573a7a05..01a0a3fcc3 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala @@ -106,4 +106,23 @@ class VendorRepoMock(toRow: Function1[VendorRowUnsaved, VendorRow], unsaved } } + override def upsertBatch(unsaved: List[VendorRow]): Stream[ConnectionIO, VendorRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, VendorRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRow.scala index 5cb3c9732e..bf93102c6d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRow.scala @@ -17,6 +17,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -94,4 +95,35 @@ object VendorRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[VendorRow] = new Write[VendorRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (AccountNumber.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.accountnumber, x.name, x.creditrating, x.preferredvendorstatus, x.activeflag, x.purchasingwebserviceurl, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + AccountNumber.put.unsafeSetNonNullable(rs, i + 1, a.accountnumber) + Name.put.unsafeSetNonNullable(rs, i + 2, a.name) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.creditrating) + Flag.put.unsafeSetNonNullable(rs, i + 4, a.preferredvendorstatus) + Flag.put.unsafeSetNonNullable(rs, i + 5, a.activeflag) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 6, a.purchasingwebserviceurl) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 7, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + AccountNumber.put.unsafeUpdateNonNullable(ps, i + 1, a.accountnumber) + Name.put.unsafeUpdateNonNullable(ps, i + 2, a.name) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.creditrating) + Flag.put.unsafeUpdateNonNullable(ps, i + 4, a.preferredvendorstatus) + Flag.put.unsafeUpdateNonNullable(ps, i + 5, a.activeflag) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 6, a.purchasingwebserviceurl) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 7, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala index 06424c6222..9e970c96e0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala @@ -30,4 +30,7 @@ trait CountryregioncurrencyRepo { def update: UpdateBuilder[CountryregioncurrencyFields, CountryregioncurrencyRow] def update(row: 
CountryregioncurrencyRow): ConnectionIO[Boolean] def upsert(unsaved: CountryregioncurrencyRow): ConnectionIO[CountryregioncurrencyRow] + def upsertBatch(unsaved: List[CountryregioncurrencyRow]): Stream[ConnectionIO, CountryregioncurrencyRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregioncurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala index 87e4e74af4..7520aef880 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.countryregion.CountryregionId import adventureworks.sales.currency.CurrencyId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -127,4 +129,29 @@ class CountryregioncurrencyRepoImpl extends CountryregioncurrencyRepo { returning "countryregioncode", "currencycode", "modifieddate"::text """.query(using CountryregioncurrencyRow.read).unique } + override def upsertBatch(unsaved: List[CountryregioncurrencyRow]): Stream[ConnectionIO, CountryregioncurrencyRow] = { + Update[CountryregioncurrencyRow]( + s"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + values (?,?::bpchar,?::timestamp) + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "countryregioncode", "currencycode", "modifieddate"::text""" + )(using CountryregioncurrencyRow.write) + .updateManyWithGeneratedKeys[CountryregioncurrencyRow]("countryregioncode", "currencycode", "modifieddate")(unsaved)(using catsStdInstancesForList, CountryregioncurrencyRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregioncurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table countryregioncurrency_TEMP (like sales.countryregioncurrency) on commit drop".update.run + _ <- new FragmentOps(sql"""copy countryregioncurrency_TEMP("countryregioncode", "currencycode", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CountryregioncurrencyRow.text) + res <- sql"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + select * from countryregioncurrency_TEMP + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregioncurrency_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala index 3076e5a2d6..4782a3ced5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala @@ -105,4 +105,23 @@ class CountryregioncurrencyRepoMock(toRow: Function1[CountryregioncurrencyRowUns unsaved } } + override def upsertBatch(unsaved: List[CountryregioncurrencyRow]): Stream[ConnectionIO, CountryregioncurrencyRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CountryregioncurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRow.scala index dcd7215d4d..665289f121 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRow.scala @@ -14,6 +14,7 @@ import adventureworks.sales.currency.CurrencyId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -61,4 +62,20 @@ object CountryregioncurrencyRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CountryregioncurrencyRow] = new Write[CountryregioncurrencyRow]( + puts = List((CountryregionId.put, Nullability.NoNulls), + (CurrencyId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.countryregioncode, x.currencycode, x.modifieddate), + unsafeSet = (rs, i, a) => { + CountryregionId.put.unsafeSetNonNullable(rs, i + 0, a.countryregioncode) + CurrencyId.put.unsafeSetNonNullable(rs, i + 1, a.currencycode) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CountryregionId.put.unsafeUpdateNonNullable(ps, i + 0, a.countryregioncode) + CurrencyId.put.unsafeUpdateNonNullable(ps, i + 1, a.currencycode) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala index a9f293d7af..0fb195f7ef 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala @@ -32,4 +32,7 @@ trait CreditcardRepo { def update: UpdateBuilder[CreditcardFields, CreditcardRow] def update(row: CreditcardRow): ConnectionIO[Boolean] def upsert(unsaved: CreditcardRow): ConnectionIO[CreditcardRow] + def upsertBatch(unsaved: List[CreditcardRow]): Stream[ConnectionIO, CreditcardRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala index 1dc0c0d389..ce86cfb94b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoShort import adventureworks.userdefined.CustomCreditcardId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.util.Put import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,37 @@ class CreditcardRepoImpl extends CreditcardRepo { returning "creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate"::text """.query(using CreditcardRow.read).unique } + override def upsertBatch(unsaved: List[CreditcardRow]): Stream[ConnectionIO, CreditcardRow] = { + Update[CreditcardRow]( + s"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + values (?::int4,?,?,?::int2,?::int2,?::timestamp) + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + returning "creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate"::text""" + )(using CreditcardRow.write) + .updateManyWithGeneratedKeys[CreditcardRow]("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate")(unsaved)(using catsStdInstancesForList, CreditcardRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table creditcard_TEMP (like sales.creditcard) on commit drop".update.run + _ <- new FragmentOps(sql"""copy creditcard_TEMP("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CreditcardRow.text) + res <- sql"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + select * from creditcard_TEMP + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table creditcard_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala index 64fab3b9ae..57b19c0acc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala @@ -107,4 +107,23 @@ class CreditcardRepoMock(toRow: Function1[CreditcardRowUnsaved, CreditcardRow], unsaved } } + override def upsertBatch(unsaved: List[CreditcardRow]): Stream[ConnectionIO, CreditcardRow] = { + Stream.emits { + unsaved.map { row => + map += (row.creditcardid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.creditcardid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRow.scala index 60edaa9406..fdd5c5943c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRow.scala @@ -14,6 +14,7 @@ import adventureworks.userdefined.CustomCreditcardId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -76,4 +77,29 @@ object CreditcardRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CreditcardRow] = new Write[CreditcardRow]( + puts = List((/* user-picked */ CustomCreditcardId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.creditcardid, x.cardtype, x.cardnumber, x.expmonth, x.expyear, x.modifieddate), + unsafeSet = (rs, i, a) => { + /* user-picked */ CustomCreditcardId.put.unsafeSetNonNullable(rs, i + 0, a.creditcardid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, 
a.cardtype) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 2, a.cardnumber) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.expmonth) + TypoShort.put.unsafeSetNonNullable(rs, i + 4, a.expyear) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + /* user-picked */ CustomCreditcardId.put.unsafeUpdateNonNullable(ps, i + 0, a.creditcardid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.cardtype) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.cardnumber) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.expmonth) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 4, a.expyear) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala index f9c279e711..c62d5c054e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala @@ -30,4 +30,7 @@ trait CurrencyRepo { def update: UpdateBuilder[CurrencyFields, CurrencyRow] def update(row: CurrencyRow): ConnectionIO[Boolean] def upsert(unsaved: CurrencyRow): ConnectionIO[CurrencyRow] + def upsertBatch(unsaved: List[CurrencyRow]): Stream[ConnectionIO, CurrencyRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala index 54c8e8d0c3..9f4ba2c3aa 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala @@ -10,12 +10,14 @@ package currency import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -114,4 +116,31 @@ class CurrencyRepoImpl extends CurrencyRepo { returning "currencycode", "name", "modifieddate"::text """.query(using CurrencyRow.read).unique } + override def upsertBatch(unsaved: List[CurrencyRow]): Stream[ConnectionIO, CurrencyRow] = { + Update[CurrencyRow]( + s"""insert into sales.currency("currencycode", "name", "modifieddate") + values (?::bpchar,?::varchar,?::timestamp) + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + returning "currencycode", "name", "modifieddate"::text""" + )(using CurrencyRow.write) + .updateManyWithGeneratedKeys[CurrencyRow]("currencycode", "name", "modifieddate")(unsaved)(using catsStdInstancesForList, CurrencyRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table currency_TEMP (like sales.currency) on commit drop".update.run + _ <- new FragmentOps(sql"""copy currency_TEMP("currencycode", "name", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CurrencyRow.text) + res <- sql"""insert into sales.currency("currencycode", "name", "modifieddate") + select * from currency_TEMP + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currency_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala index eb3d43fa12..c125ff7978 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala @@ -105,4 +105,23 @@ class CurrencyRepoMock(toRow: Function1[CurrencyRowUnsaved, CurrencyRow], unsaved } } + override def upsertBatch(unsaved: List[CurrencyRow]): Stream[ConnectionIO, CurrencyRow] = { + Stream.emits { + unsaved.map { row => + map += (row.currencycode -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.currencycode -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRow.scala index 286b6a7692..7a90d8eae5 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currency/CurrencyRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -55,4 +56,20 @@ object CurrencyRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CurrencyRow] = new Write[CurrencyRow]( + puts = List((CurrencyId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.currencycode, x.name, x.modifieddate), + unsafeSet = (rs, i, a) => { + CurrencyId.put.unsafeSetNonNullable(rs, i + 0, a.currencycode) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CurrencyId.put.unsafeUpdateNonNullable(ps, i + 0, a.currencycode) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala index 22153e1ecb..3105dbd322 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala @@ -30,4 +30,7 @@ trait CurrencyrateRepo { def update: UpdateBuilder[CurrencyrateFields, CurrencyrateRow] def update(row: CurrencyrateRow): ConnectionIO[Boolean] def upsert(unsaved: CurrencyrateRow): ConnectionIO[CurrencyrateRow] + def upsertBatch(unsaved: List[CurrencyrateRow]): Stream[ConnectionIO, CurrencyrateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyrateRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala index 1e30d60652..013d44dba3 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala @@ -10,6 +10,7 @@ package currencyrate import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.sales.currency.CurrencyId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -134,4 +136,39 @@ class CurrencyrateRepoImpl extends CurrencyrateRepo { returning "currencyrateid", "currencyratedate"::text, "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate"::text """.query(using CurrencyrateRow.read).unique } + override def upsertBatch(unsaved: List[CurrencyrateRow]): Stream[ConnectionIO, CurrencyrateRow] = { + Update[CurrencyrateRow]( + s"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + values (?::int4,?::timestamp,?::bpchar,?::bpchar,?::numeric,?::numeric,?::timestamp) + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + returning "currencyrateid", "currencyratedate"::text, "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate"::text""" + )(using CurrencyrateRow.write) + .updateManyWithGeneratedKeys[CurrencyrateRow]("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate")(unsaved)(using catsStdInstancesForList, CurrencyrateRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyrateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table currencyrate_TEMP (like sales.currencyrate) on commit drop".update.run + _ <- new FragmentOps(sql"""copy currencyrate_TEMP("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CurrencyrateRow.text) + res <- sql"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + select * from currencyrate_TEMP + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currencyrate_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala index b40d4aede6..628090109b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala @@ -105,4 +105,23 @@ class CurrencyrateRepoMock(toRow: Function1[CurrencyrateRowUnsaved, Currencyrate unsaved } } + override def upsertBatch(unsaved: List[CurrencyrateRow]): Stream[ConnectionIO, CurrencyrateRow] = { + Stream.emits { + unsaved.map { row => + map += (row.currencyrateid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CurrencyrateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.currencyrateid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRow.scala index af99a66306..d3534f002e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRow.scala @@ -13,6 +13,7 @@ import adventureworks.sales.currency.CurrencyId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -83,4 +84,32 @@ object CurrencyrateRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CurrencyrateRow] = new Write[CurrencyrateRow]( + puts = List((CurrencyrateId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (CurrencyId.put, Nullability.NoNulls), + (CurrencyId.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.currencyrateid, x.currencyratedate, x.fromcurrencycode, x.tocurrencycode, x.averagerate, x.endofdayrate, x.modifieddate), + unsafeSet = (rs, i, a) => { + CurrencyrateId.put.unsafeSetNonNullable(rs, i + 0, a.currencyrateid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.currencyratedate) + CurrencyId.put.unsafeSetNonNullable(rs, i + 2, a.fromcurrencycode) + CurrencyId.put.unsafeSetNonNullable(rs, i + 3, a.tocurrencycode) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 4, a.averagerate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 5, a.endofdayrate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CurrencyrateId.put.unsafeUpdateNonNullable(ps, i + 0, a.currencyrateid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.currencyratedate) + CurrencyId.put.unsafeUpdateNonNullable(ps, i + 2, a.fromcurrencycode) + CurrencyId.put.unsafeUpdateNonNullable(ps, i + 3, a.tocurrencycode) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.averagerate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.endofdayrate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala index dac21df469..77c17afeca 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala @@ -30,4 +30,7 @@ trait CustomerRepo { def update: UpdateBuilder[CustomerFields, CustomerRow] def update(row: CustomerRow): ConnectionIO[Boolean] def upsert(unsaved: CustomerRow): ConnectionIO[CustomerRow] + def upsertBatch(unsaved: List[CustomerRow]): Stream[ConnectionIO, 
CustomerRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, CustomerRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala index 5c18b33cce..4d87d18347 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -134,4 +136,37 @@ class CustomerRepoImpl extends CustomerRepo { returning "customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate"::text """.query(using CustomerRow.read).unique } + override def upsertBatch(unsaved: List[CustomerRow]): Stream[ConnectionIO, CustomerRow] = { + Update[CustomerRow]( + s"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + values (?::int4,?::int4,?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate"::text""" + )(using CustomerRow.write) + .updateManyWithGeneratedKeys[CustomerRow]("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, CustomerRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CustomerRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table customer_TEMP (like sales.customer) on commit drop".update.run + _ <- new FragmentOps(sql"""copy customer_TEMP("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using CustomerRow.text) + res <- sql"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + select * from customer_TEMP + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table customer_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala index bd608e1289..663c846b19 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala @@ -105,4 +105,23 @@ class CustomerRepoMock(toRow: Function1[CustomerRowUnsaved, CustomerRow], unsaved } } + override def upsertBatch(unsaved: List[CustomerRow]): Stream[ConnectionIO, CustomerRow] = { + Stream.emits { + unsaved.map { row => + map += (row.customerid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, CustomerRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.customerid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRow.scala index fa1714e210..4016a757dc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/customer/CustomerRow.scala @@ -15,6 +15,7 @@ import adventureworks.sales.salesterritory.SalesterritoryId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -79,4 +80,29 @@ object CustomerRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[CustomerRow] = new Write[CustomerRow]( + puts = List((CustomerId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.Nullable), + (BusinessentityId.put, Nullability.Nullable), + (SalesterritoryId.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.customerid, x.personid, x.storeid, x.territoryid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + CustomerId.put.unsafeSetNonNullable(rs, i + 0, a.customerid) + BusinessentityId.put.unsafeSetNullable(rs, i + 1, a.personid) + BusinessentityId.put.unsafeSetNullable(rs, i + 2, a.storeid) + SalesterritoryId.put.unsafeSetNullable(rs, i + 
3, a.territoryid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + CustomerId.put.unsafeUpdateNonNullable(ps, i + 0, a.customerid) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 1, a.personid) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 2, a.storeid) + SalesterritoryId.put.unsafeUpdateNullable(ps, i + 3, a.territoryid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala index de08af799e..839ecd4cb0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala @@ -32,4 +32,7 @@ trait PersoncreditcardRepo { def update: UpdateBuilder[PersoncreditcardFields, PersoncreditcardRow] def update(row: PersoncreditcardRow): ConnectionIO[Boolean] def upsert(unsaved: PersoncreditcardRow): ConnectionIO[PersoncreditcardRow] + def upsertBatch(unsaved: List[PersoncreditcardRow]): Stream[ConnectionIO, PersoncreditcardRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, PersoncreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala index 4fe9f85a06..fb9dcd8390 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.person.businessentity.BusinessentityId import adventureworks.userdefined.CustomCreditcardId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Put import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -128,4 +130,29 @@ class PersoncreditcardRepoImpl extends PersoncreditcardRepo { returning "businessentityid", "creditcardid", "modifieddate"::text """.query(using PersoncreditcardRow.read).unique } + override def upsertBatch(unsaved: List[PersoncreditcardRow]): Stream[ConnectionIO, PersoncreditcardRow] = { + Update[PersoncreditcardRow]( + s"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + values (?::int4,?::int4,?::timestamp) + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "creditcardid", "modifieddate"::text""" + )(using 
PersoncreditcardRow.write) + .updateManyWithGeneratedKeys[PersoncreditcardRow]("businessentityid", "creditcardid", "modifieddate")(unsaved)(using catsStdInstancesForList, PersoncreditcardRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersoncreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table personcreditcard_TEMP (like sales.personcreditcard) on commit drop".update.run + _ <- new FragmentOps(sql"""copy personcreditcard_TEMP("businessentityid", "creditcardid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using PersoncreditcardRow.text) + res <- sql"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + select * from personcreditcard_TEMP + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personcreditcard_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala index 6e43d0bccf..0a30ccdeac 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala @@ -107,4 +107,23 @@ class PersoncreditcardRepoMock(toRow: Function1[PersoncreditcardRowUnsaved, Pers unsaved } } + override def upsertBatch(unsaved: List[PersoncreditcardRow]): Stream[ConnectionIO, PersoncreditcardRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, PersoncreditcardRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRow.scala index 21c36be43c..f8cda5a84c 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRow.scala @@ -14,6 +14,7 @@ import adventureworks.userdefined.CustomCreditcardId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -61,4 +62,20 @@ object PersoncreditcardRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[PersoncreditcardRow] = new Write[PersoncreditcardRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (/* user-picked */ CustomCreditcardId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.creditcardid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + /* user-picked */ CustomCreditcardId.put.unsafeSetNonNullable(rs, i + 1, a.creditcardid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + /* user-picked */ CustomCreditcardId.put.unsafeUpdateNonNullable(ps, i + 1, a.creditcardid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala index 6487ee6453..5d21a35c22 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala @@ -30,4 +30,7 @@ trait SalesorderdetailRepo { def update: UpdateBuilder[SalesorderdetailFields, SalesorderdetailRow] def update(row: SalesorderdetailRow): ConnectionIO[Boolean] def upsert(unsaved: SalesorderdetailRow): ConnectionIO[SalesorderdetailRow] + def upsertBatch(unsaved: List[SalesorderdetailRow]): Stream[ConnectionIO, SalesorderdetailRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderdetailRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala index 6412a597c4..26a12afbed 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala @@ -14,6 +14,7 @@ import adventureworks.customtypes.TypoUUID import adventureworks.production.product.ProductId import adventureworks.sales.salesorderheader.SalesorderheaderId import adventureworks.sales.specialoffer.SpecialofferId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -21,6 +22,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -168,4 +170,43 @@ class SalesorderdetailRepoImpl extends SalesorderdetailRepo { returning "salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate"::text """.query(using SalesorderdetailRow.read).unique } + override def upsertBatch(unsaved: List[SalesorderdetailRow]): Stream[ConnectionIO, SalesorderdetailRow] = { + Update[SalesorderdetailRow]( + s"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + values (?::int4,?::int4,?,?::int2,?::int4,?::int4,?::numeric,?::numeric,?::uuid,?::timestamp) + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate"::text""" + )(using SalesorderdetailRow.write) + .updateManyWithGeneratedKeys[SalesorderdetailRow]("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesorderdetailRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderdetailRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesorderdetail_TEMP (like sales.salesorderdetail) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesorderdetail_TEMP("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesorderdetailRow.text) + res <- sql"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + select * from salesorderdetail_TEMP + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderdetail_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala index 54cae19efc..6317331f87 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala @@ -105,4 +105,23 @@ class SalesorderdetailRepoMock(toRow: Function1[SalesorderdetailRowUnsaved, Sale unsaved } } + override def upsertBatch(unsaved: List[SalesorderdetailRow]): Stream[ConnectionIO, SalesorderdetailRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderdetailRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRow.scala index 01b271263e..2133b39790 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRow.scala @@ -18,6 +18,7 @@ import adventureworks.sales.specialofferproduct.SpecialofferproductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -118,4 +119,41 @@ object SalesorderdetailRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesorderdetailRow] = new Write[SalesorderdetailRow]( + puts = List((SalesorderheaderId.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoShort.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (SpecialofferId.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salesorderid, x.salesorderdetailid, x.carriertrackingnumber, x.orderqty, x.productid, x.specialofferid, x.unitprice, x.unitpricediscount, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.salesorderid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 1, a.salesorderdetailid) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 2, a.carriertrackingnumber) + TypoShort.put.unsafeSetNonNullable(rs, i + 3, a.orderqty) + ProductId.put.unsafeSetNonNullable(rs, i + 4, a.productid) + SpecialofferId.put.unsafeSetNonNullable(rs, i + 5, a.specialofferid) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 6, a.unitprice) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.unitpricediscount) + TypoUUID.put.unsafeSetNonNullable(rs, i + 8, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 9, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.salesorderid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.salesorderdetailid) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 2, a.carriertrackingnumber) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 3, a.orderqty) + ProductId.put.unsafeUpdateNonNullable(ps, i + 4, a.productid) + SpecialofferId.put.unsafeUpdateNonNullable(ps, i + 5, a.specialofferid) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.unitprice) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.unitpricediscount) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 8, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 9, a.modifieddate) + } + ) } diff --git 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala index 1b3c94bdca..bf8080cbfb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala @@ -30,4 +30,7 @@ trait SalesorderheaderRepo { def update: UpdateBuilder[SalesorderheaderFields, SalesorderheaderRow] def update(row: SalesorderheaderRow): ConnectionIO[Boolean] def upsert(unsaved: SalesorderheaderRow): ConnectionIO[SalesorderheaderRow] + def upsertBatch(unsaved: List[SalesorderheaderRow]): Stream[ConnectionIO, SalesorderheaderRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala index 8849fb8e6d..9fcebe09bb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala @@ -21,6 +21,7 @@ import adventureworks.sales.currencyrate.CurrencyrateId import adventureworks.sales.customer.CustomerId import adventureworks.sales.salesterritory.SalesterritoryId import adventureworks.userdefined.CustomCreditcardId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -28,6 +29,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -241,4 +243,75 @@ class SalesorderheaderRepoImpl extends SalesorderheaderRepo { returning "salesorderid", "revisionnumber", "orderdate"::text, "duedate"::text, "shipdate"::text, "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate"::text """.query(using SalesorderheaderRow.read).unique } + override def upsertBatch(unsaved: List[SalesorderheaderRow]): Stream[ConnectionIO, SalesorderheaderRow] = { + Update[SalesorderheaderRow]( + s"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + values 
(?::int4,?::int2,?::timestamp,?::timestamp,?::timestamp,?::int2,?::bool,?::varchar,?::varchar,?::int4,?::int4,?::int4,?::int4,?::int4,?::int4,?::int4,?,?::int4,?::numeric,?::numeric,?::numeric,?::numeric,?,?::uuid,?::timestamp) + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + "shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "revisionnumber", "orderdate"::text, "duedate"::text, "shipdate"::text, "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate"::text""" + )(using SalesorderheaderRow.write) + .updateManyWithGeneratedKeys[SalesorderheaderRow]("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesorderheaderRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesorderheader_TEMP (like sales.salesorderheader) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesorderheader_TEMP("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesorderheaderRow.text) + res <- sql"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + select * from salesorderheader_TEMP + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + "shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheader_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala index 5fb9337641..86a1fdfbd2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala @@ -105,4 +105,23 @@ class SalesorderheaderRepoMock(toRow: Function1[SalesorderheaderRowUnsaved, Sale unsaved } } + override def upsertBatch(unsaved: List[SalesorderheaderRow]): Stream[ConnectionIO, SalesorderheaderRow] = { + Stream.emits { + unsaved.map { row => + map += (row.salesorderid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheaderRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.salesorderid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRow.scala index 4ff377276c..aa30a55455 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRow.scala @@ -24,6 +24,7 @@ import adventureworks.userdefined.CustomCreditcardId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.DecodingFailure @@ -289,4 +290,86 @@ object SalesorderheaderRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesorderheaderRow] = new Write[SalesorderheaderRow]( + puts = List((SalesorderheaderId.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoShort.put, Nullability.NoNulls), + (Flag.put, Nullability.NoNulls), + (OrderNumber.put, Nullability.Nullable), + (AccountNumber.put, Nullability.Nullable), + (CustomerId.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.Nullable), + (SalesterritoryId.put, Nullability.Nullable), + (AddressId.put, Nullability.NoNulls), + (AddressId.put, Nullability.NoNulls), + (ShipmethodId.put, Nullability.NoNulls), + (/* user-picked */ CustomCreditcardId.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (CurrencyrateId.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (Meta.StringMeta.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salesorderid, x.revisionnumber, x.orderdate, x.duedate, x.shipdate, x.status, x.onlineorderflag, x.purchaseordernumber, x.accountnumber, x.customerid, x.salespersonid, x.territoryid, x.billtoaddressid, x.shiptoaddressid, x.shipmethodid, x.creditcardid, x.creditcardapprovalcode, x.currencyrateid, x.subtotal, x.taxamt, x.freight, x.totaldue, x.comment, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.salesorderid) + TypoShort.put.unsafeSetNonNullable(rs, i + 1, a.revisionnumber) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.orderdate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.duedate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 4, a.shipdate) + TypoShort.put.unsafeSetNonNullable(rs, i + 5, a.status) + Flag.put.unsafeSetNonNullable(rs, i + 6, a.onlineorderflag) + OrderNumber.put.unsafeSetNullable(rs, i + 7, a.purchaseordernumber) + AccountNumber.put.unsafeSetNullable(rs, i + 8, a.accountnumber) + CustomerId.put.unsafeSetNonNullable(rs, i + 
9, a.customerid) + BusinessentityId.put.unsafeSetNullable(rs, i + 10, a.salespersonid) + SalesterritoryId.put.unsafeSetNullable(rs, i + 11, a.territoryid) + AddressId.put.unsafeSetNonNullable(rs, i + 12, a.billtoaddressid) + AddressId.put.unsafeSetNonNullable(rs, i + 13, a.shiptoaddressid) + ShipmethodId.put.unsafeSetNonNullable(rs, i + 14, a.shipmethodid) + /* user-picked */ CustomCreditcardId.put.unsafeSetNullable(rs, i + 15, a.creditcardid) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 16, a.creditcardapprovalcode) + CurrencyrateId.put.unsafeSetNullable(rs, i + 17, a.currencyrateid) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 18, a.subtotal) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 19, a.taxamt) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 20, a.freight) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 21, a.totaldue) + Meta.StringMeta.put.unsafeSetNullable(rs, i + 22, a.comment) + TypoUUID.put.unsafeSetNonNullable(rs, i + 23, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 24, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.salesorderid) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 1, a.revisionnumber) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.orderdate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.duedate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 4, a.shipdate) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 5, a.status) + Flag.put.unsafeUpdateNonNullable(ps, i + 6, a.onlineorderflag) + OrderNumber.put.unsafeUpdateNullable(ps, i + 7, a.purchaseordernumber) + AccountNumber.put.unsafeUpdateNullable(ps, i + 8, a.accountnumber) + CustomerId.put.unsafeUpdateNonNullable(ps, i + 9, a.customerid) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 10, a.salespersonid) + SalesterritoryId.put.unsafeUpdateNullable(ps, i + 11, a.territoryid) + AddressId.put.unsafeUpdateNonNullable(ps, i + 12, a.billtoaddressid) + AddressId.put.unsafeUpdateNonNullable(ps, i + 13, a.shiptoaddressid) + ShipmethodId.put.unsafeUpdateNonNullable(ps, i + 14, a.shipmethodid) + /* user-picked */ CustomCreditcardId.put.unsafeUpdateNullable(ps, i + 15, a.creditcardid) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 16, a.creditcardapprovalcode) + CurrencyrateId.put.unsafeUpdateNullable(ps, i + 17, a.currencyrateid) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 18, a.subtotal) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 19, a.taxamt) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 20, a.freight) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 21, a.totaldue) + Meta.StringMeta.put.unsafeUpdateNullable(ps, i + 22, a.comment) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 23, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 24, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala index 8864be3256..4a1f57e4cb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala @@ -30,4 +30,7 @@ trait 
SalesorderheadersalesreasonRepo { def update: UpdateBuilder[SalesorderheadersalesreasonFields, SalesorderheadersalesreasonRow] def update(row: SalesorderheadersalesreasonRow): ConnectionIO[Boolean] def upsert(unsaved: SalesorderheadersalesreasonRow): ConnectionIO[SalesorderheadersalesreasonRow] + def upsertBatch(unsaved: List[SalesorderheadersalesreasonRow]): Stream[ConnectionIO, SalesorderheadersalesreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala index 52dcdb1049..4877c01ed0 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala @@ -11,12 +11,14 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.sales.salesorderheader.SalesorderheaderId import adventureworks.sales.salesreason.SalesreasonId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -127,4 +129,29 @@ class SalesorderheadersalesreasonRepoImpl extends SalesorderheadersalesreasonRep returning "salesorderid", "salesreasonid", "modifieddate"::text """.query(using SalesorderheadersalesreasonRow.read).unique } + override def upsertBatch(unsaved: List[SalesorderheadersalesreasonRow]): Stream[ConnectionIO, SalesorderheadersalesreasonRow] = { + Update[SalesorderheadersalesreasonRow]( + s"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + values (?::int4,?::int4,?::timestamp) + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + returning "salesorderid", "salesreasonid", "modifieddate"::text""" + )(using SalesorderheadersalesreasonRow.write) + .updateManyWithGeneratedKeys[SalesorderheadersalesreasonRow]("salesorderid", "salesreasonid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesorderheadersalesreasonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesorderheadersalesreason_TEMP (like sales.salesorderheadersalesreason) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesorderheadersalesreason_TEMP("salesorderid", "salesreasonid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesorderheadersalesreasonRow.text) + res <- sql"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + select * from salesorderheadersalesreason_TEMP + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheadersalesreason_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala index ad6c685410..38aa0b63a7 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala @@ -105,4 +105,23 @@ class SalesorderheadersalesreasonRepoMock(toRow: Function1[Salesorderheadersales unsaved } } + override def upsertBatch(unsaved: List[SalesorderheadersalesreasonRow]): Stream[ConnectionIO, SalesorderheadersalesreasonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRow.scala index e146435aaf..5f57902b28 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRow.scala @@ -14,6 +14,7 @@ import adventureworks.sales.salesreason.SalesreasonId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -61,4 +62,20 @@ object SalesorderheadersalesreasonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesorderheadersalesreasonRow] = new Write[SalesorderheadersalesreasonRow]( + puts = List((SalesorderheaderId.put, Nullability.NoNulls), + (SalesreasonId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salesorderid, x.salesreasonid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesorderheaderId.put.unsafeSetNonNullable(rs, i + 0, a.salesorderid) + SalesreasonId.put.unsafeSetNonNullable(rs, i + 1, a.salesreasonid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesorderheaderId.put.unsafeUpdateNonNullable(ps, i + 0, a.salesorderid) + SalesreasonId.put.unsafeUpdateNonNullable(ps, i + 1, a.salesreasonid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala index 341a512333..235a11a7b8 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala @@ -31,4 +31,7 @@ trait SalespersonRepo { def update: UpdateBuilder[SalespersonFields, SalespersonRow] def update(row: SalespersonRow): ConnectionIO[Boolean] def upsert(unsaved: SalespersonRow): ConnectionIO[SalespersonRow] + def upsertBatch(unsaved: List[SalespersonRow]): Stream[ConnectionIO, SalespersonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala index 4f75c59481..0bc71f5ccf 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -156,4 +158,43 @@ class SalespersonRepoImpl extends SalespersonRepo { returning "businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate"::text """.query(using SalespersonRow.read).unique } + override def upsertBatch(unsaved: List[SalespersonRow]): Stream[ConnectionIO, SalespersonRow] = { + Update[SalespersonRow]( + s"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + values (?::int4,?::int4,?::numeric,?::numeric,?::numeric,?::numeric,?::numeric,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate"::text""" + )(using SalespersonRow.write) + .updateManyWithGeneratedKeys[SalespersonRow]("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalespersonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesperson_TEMP (like sales.salesperson) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesperson_TEMP("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalespersonRow.text) + res <- sql"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + select * from salesperson_TEMP + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesperson_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala index e75ed30d60..2277c0e3d2 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala @@ -106,4 +106,23 @@ class SalespersonRepoMock(toRow: Function1[SalespersonRowUnsaved, SalespersonRow unsaved } } + override def upsertBatch(unsaved: List[SalespersonRow]): Stream[ConnectionIO, SalespersonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRow.scala index fbbf538e5b..b1c375bf28 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRow.scala @@ -15,6 +15,7 @@ import adventureworks.sales.salesterritory.SalesterritoryId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -105,4 +106,38 @@ object SalespersonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalespersonRow] = new Write[SalespersonRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (SalesterritoryId.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.Nullable), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.territoryid, x.salesquota, x.bonus, x.commissionpct, x.salesytd, x.saleslastyear, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + SalesterritoryId.put.unsafeSetNullable(rs, i + 1, a.territoryid) + Meta.ScalaBigDecimalMeta.put.unsafeSetNullable(rs, i + 2, a.salesquota) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.bonus) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 4, a.commissionpct) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 5, a.salesytd) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 6, a.saleslastyear) + TypoUUID.put.unsafeSetNonNullable(rs, i + 7, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 8, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + SalesterritoryId.put.unsafeUpdateNullable(ps, i + 1, a.territoryid) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNullable(ps, i + 2, a.salesquota) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.bonus) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.commissionpct) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.salesytd) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.saleslastyear) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 7, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 8, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala index 
79628947da..785bc64e16 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala @@ -30,4 +30,7 @@ trait SalespersonquotahistoryRepo { def update: UpdateBuilder[SalespersonquotahistoryFields, SalespersonquotahistoryRow] def update(row: SalespersonquotahistoryRow): ConnectionIO[Boolean] def upsert(unsaved: SalespersonquotahistoryRow): ConnectionIO[SalespersonquotahistoryRow] + def upsertBatch(unsaved: List[SalespersonquotahistoryRow]): Stream[ConnectionIO, SalespersonquotahistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonquotahistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala index a7d71b7910..c82662bbd3 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala @@ -11,6 +11,7 @@ import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -18,6 +19,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -139,4 +141,33 @@ class SalespersonquotahistoryRepoImpl extends SalespersonquotahistoryRepo { returning "businessentityid", "quotadate"::text, "salesquota", "rowguid", "modifieddate"::text """.query(using SalespersonquotahistoryRow.read).unique } + override def upsertBatch(unsaved: List[SalespersonquotahistoryRow]): Stream[ConnectionIO, SalespersonquotahistoryRow] = { + Update[SalespersonquotahistoryRow]( + s"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + values (?::int4,?::timestamp,?::numeric,?::uuid,?::timestamp) + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "quotadate"::text, "salesquota", "rowguid", "modifieddate"::text""" + )(using SalespersonquotahistoryRow.write) + .updateManyWithGeneratedKeys[SalespersonquotahistoryRow]("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalespersonquotahistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonquotahistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salespersonquotahistory_TEMP (like sales.salespersonquotahistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salespersonquotahistory_TEMP("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalespersonquotahistoryRow.text) + res <- sql"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + select * from salespersonquotahistory_TEMP + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salespersonquotahistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala index 98fe9ed526..90cdc494b9 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala @@ -105,4 +105,23 @@ class SalespersonquotahistoryRepoMock(toRow: Function1[SalespersonquotahistoryRo unsaved } } + override def upsertBatch(unsaved: List[SalespersonquotahistoryRow]): Stream[ConnectionIO, SalespersonquotahistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalespersonquotahistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRow.scala index 77368f15e3..8b93767d3e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRow.scala @@ -14,6 +14,7 @@ import adventureworks.person.businessentity.BusinessentityId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -74,4 +75,26 @@ object SalespersonquotahistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalespersonquotahistoryRow] = new Write[SalespersonquotahistoryRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.quotadate, x.salesquota, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 1, a.quotadate) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.salesquota) + TypoUUID.put.unsafeSetNonNullable(rs, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 1, a.quotadate) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.salesquota) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 3, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala index 8a2230c8ac..248f1a2aef 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala @@ -30,4 +30,7 @@ trait SalesreasonRepo { def update: UpdateBuilder[SalesreasonFields, SalesreasonRow] def update(row: SalesreasonRow): ConnectionIO[Boolean] def upsert(unsaved: SalesreasonRow): ConnectionIO[SalesreasonRow] + def upsertBatch(unsaved: List[SalesreasonRow]): Stream[ConnectionIO, SalesreasonRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala index 17475479b2..ef8ce9f086 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala @@ -10,12 +10,14 @@ package salesreason import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -121,4 +123,33 @@ class SalesreasonRepoImpl extends SalesreasonRepo { returning "salesreasonid", "name", "reasontype", "modifieddate"::text """.query(using SalesreasonRow.read).unique } + override def upsertBatch(unsaved: List[SalesreasonRow]): Stream[ConnectionIO, SalesreasonRow] = { + Update[SalesreasonRow]( + s"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + values (?::int4,?::varchar,?::varchar,?::timestamp) + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + returning "salesreasonid", "name", "reasontype", "modifieddate"::text""" + )(using SalesreasonRow.write) + .updateManyWithGeneratedKeys[SalesreasonRow]("salesreasonid", "name", "reasontype", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesreasonRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesreason_TEMP (like sales.salesreason) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesreason_TEMP("salesreasonid", "name", "reasontype", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesreasonRow.text) + res <- sql"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + select * from salesreason_TEMP + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesreason_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala index a064554838..484040395b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala @@ -105,4 +105,23 @@ class SalesreasonRepoMock(toRow: Function1[SalesreasonRowUnsaved, SalesreasonRow unsaved } } + override def upsertBatch(unsaved: List[SalesreasonRow]): Stream[ConnectionIO, SalesreasonRow] = { + Stream.emits { + unsaved.map { row => + map += (row.salesreasonid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesreasonRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.salesreasonid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRow.scala index 2dfbce4a65..17ffe51cfb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRow.scala @@ -13,6 +13,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -62,4 +63,23 @@ object SalesreasonRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesreasonRow] = new Write[SalesreasonRow]( + puts = List((SalesreasonId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salesreasonid, x.name, x.reasontype, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesreasonId.put.unsafeSetNonNullable(rs, i + 0, a.salesreasonid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + Name.put.unsafeSetNonNullable(rs, i + 2, a.reasontype) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesreasonId.put.unsafeUpdateNonNullable(ps, i + 0, a.salesreasonid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + 
Name.put.unsafeUpdateNonNullable(ps, i + 2, a.reasontype) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala index 52e375556f..2c9b1795bf 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala @@ -30,4 +30,7 @@ trait SalestaxrateRepo { def update: UpdateBuilder[SalestaxrateFields, SalestaxrateRow] def update(row: SalestaxrateRow): ConnectionIO[Boolean] def upsert(unsaved: SalestaxrateRow): ConnectionIO[SalestaxrateRow] + def upsertBatch(unsaved: List[SalestaxrateRow]): Stream[ConnectionIO, SalestaxrateRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalestaxrateRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala index cf414d28ac..740ed59d2d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoShort import adventureworks.customtypes.TypoUUID import adventureworks.person.stateprovince.StateprovinceId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -20,6 +21,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -143,4 +145,39 @@ class SalestaxrateRepoImpl extends SalestaxrateRepo { returning "salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate"::text """.query(using SalestaxrateRow.read).unique } + override def upsertBatch(unsaved: List[SalestaxrateRow]): Stream[ConnectionIO, SalestaxrateRow] = { + Update[SalestaxrateRow]( + s"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + values (?::int4,?::int4,?::int2,?::numeric,?::varchar,?::uuid,?::timestamp) + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate"::text""" + )(using SalestaxrateRow.write) + .updateManyWithGeneratedKeys[SalestaxrateRow]("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalestaxrateRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit 
mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalestaxrateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salestaxrate_TEMP (like sales.salestaxrate) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salestaxrate_TEMP("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalestaxrateRow.text) + res <- sql"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + select * from salestaxrate_TEMP + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salestaxrate_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala index ab6ff6bfda..2afea1308d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala @@ -105,4 +105,23 @@ class SalestaxrateRepoMock(toRow: Function1[SalestaxrateRowUnsaved, Salestaxrate unsaved } } + override def upsertBatch(unsaved: List[SalestaxrateRow]): Stream[ConnectionIO, SalestaxrateRow] = { + Stream.emits { + unsaved.map { row => + map += (row.salestaxrateid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalestaxrateRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.salestaxrateid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRow.scala index 4a97ac9ace..d771751a4a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRow.scala @@ -16,6 +16,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -87,4 +88,32 @@ object SalestaxrateRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalestaxrateRow] = new Write[SalestaxrateRow]( + puts = List((SalestaxrateId.put, Nullability.NoNulls), + (StateprovinceId.put, Nullability.NoNulls), + (TypoShort.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.salestaxrateid, x.stateprovinceid, x.taxtype, x.taxrate, x.name, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalestaxrateId.put.unsafeSetNonNullable(rs, i + 0, a.salestaxrateid) + StateprovinceId.put.unsafeSetNonNullable(rs, i + 1, a.stateprovinceid) + TypoShort.put.unsafeSetNonNullable(rs, i + 2, a.taxtype) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 3, a.taxrate) + Name.put.unsafeSetNonNullable(rs, i + 4, a.name) + TypoUUID.put.unsafeSetNonNullable(rs, i + 5, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalestaxrateId.put.unsafeUpdateNonNullable(ps, i + 0, a.salestaxrateid) + StateprovinceId.put.unsafeUpdateNonNullable(ps, i + 1, a.stateprovinceid) + TypoShort.put.unsafeUpdateNonNullable(ps, i + 2, a.taxtype) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.taxrate) + Name.put.unsafeUpdateNonNullable(ps, i + 4, a.name) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 5, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala index 4d6976a24d..ca487e230e 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala @@ -30,4 +30,7 @@ trait SalesterritoryRepo { def update: UpdateBuilder[SalesterritoryFields, SalesterritoryRow] def update(row: SalesterritoryRow): ConnectionIO[Boolean] def upsert(unsaved: SalesterritoryRow): ConnectionIO[SalesterritoryRow] + def upsertBatch(unsaved: List[SalesterritoryRow]): Stream[ConnectionIO, SalesterritoryRow] + /* NOTE: this functionality is not safe if you use auto-commit 
mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala index ca9268c0e4..42dfdb6852 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala @@ -12,6 +12,7 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.countryregion.CountryregionId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -19,6 +20,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -163,4 +165,45 @@ class SalesterritoryRepoImpl extends SalesterritoryRepo { returning "territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate"::text """.query(using SalesterritoryRow.read).unique } + override def upsertBatch(unsaved: List[SalesterritoryRow]): Stream[ConnectionIO, SalesterritoryRow] = { + Update[SalesterritoryRow]( + s"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + values (?::int4,?::varchar,?,?,?::numeric,?::numeric,?::numeric,?::numeric,?::uuid,?::timestamp) + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate"::text""" + )(using SalesterritoryRow.write) + .updateManyWithGeneratedKeys[SalesterritoryRow]("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesterritoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesterritory_TEMP (like sales.salesterritory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesterritory_TEMP("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesterritoryRow.text) + res <- sql"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + select * from salesterritory_TEMP + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala index f00de79f98..f595a0ea41 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala @@ -105,4 +105,23 @@ class SalesterritoryRepoMock(toRow: Function1[SalesterritoryRowUnsaved, Salester unsaved } } + override def upsertBatch(unsaved: List[SalesterritoryRow]): Stream[ConnectionIO, SalesterritoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.territoryid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.territoryid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRow.scala index 773d8db4bb..7190b9672d 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRow.scala @@ -15,6 +15,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -110,4 +111,41 @@ object SalesterritoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesterritoryRow] = new Write[SalesterritoryRow]( + puts = List((SalesterritoryId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (CountryregionId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.territoryid, x.name, x.countryregioncode, x.group, x.salesytd, x.saleslastyear, x.costytd, x.costlastyear, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SalesterritoryId.put.unsafeSetNonNullable(rs, i + 0, a.territoryid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + CountryregionId.put.unsafeSetNonNullable(rs, i + 2, a.countryregioncode) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.group) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 4, a.salesytd) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 5, a.saleslastyear) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 6, a.costytd) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 7, a.costlastyear) + TypoUUID.put.unsafeSetNonNullable(rs, i + 8, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 9, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SalesterritoryId.put.unsafeUpdateNonNullable(ps, i + 0, a.territoryid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + CountryregionId.put.unsafeUpdateNonNullable(ps, i + 2, a.countryregioncode) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.group) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.salesytd) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 5, a.saleslastyear) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 6, a.costytd) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.costlastyear) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 8, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 9, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala 
b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala index 5d987ea579..cd3bc9f0a6 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala @@ -30,4 +30,7 @@ trait SalesterritoryhistoryRepo { def update: UpdateBuilder[SalesterritoryhistoryFields, SalesterritoryhistoryRow] def update(row: SalesterritoryhistoryRow): ConnectionIO[Boolean] def upsert(unsaved: SalesterritoryhistoryRow): ConnectionIO[SalesterritoryhistoryRow] + def upsertBatch(unsaved: List[SalesterritoryhistoryRow]): Stream[ConnectionIO, SalesterritoryhistoryRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala index acfda57883..86656429cd 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.person.businessentity.BusinessentityId import adventureworks.sales.salesterritory.SalesterritoryId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -143,4 +145,33 @@ class SalesterritoryhistoryRepoImpl extends SalesterritoryhistoryRepo { returning "businessentityid", "territoryid", "startdate"::text, "enddate"::text, "rowguid", "modifieddate"::text """.query(using SalesterritoryhistoryRow.read).unique } + override def upsertBatch(unsaved: List[SalesterritoryhistoryRow]): Stream[ConnectionIO, SalesterritoryhistoryRow] = { + Update[SalesterritoryhistoryRow]( + s"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + values (?::int4,?::int4,?::timestamp,?::timestamp,?::uuid,?::timestamp) + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "territoryid", "startdate"::text, "enddate"::text, "rowguid", "modifieddate"::text""" + )(using SalesterritoryhistoryRow.write) + .updateManyWithGeneratedKeys[SalesterritoryhistoryRow]("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SalesterritoryhistoryRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table salesterritoryhistory_TEMP (like sales.salesterritoryhistory) on commit drop".update.run + _ <- new FragmentOps(sql"""copy salesterritoryhistory_TEMP("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SalesterritoryhistoryRow.text) + res <- sql"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + select * from salesterritoryhistory_TEMP + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritoryhistory_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala index 6d53b74a01..3ec28f0d84 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala @@ -105,4 +105,23 @@ class SalesterritoryhistoryRepoMock(toRow: Function1[SalesterritoryhistoryRowUns unsaved } } + override def upsertBatch(unsaved: List[SalesterritoryhistoryRow]): Stream[ConnectionIO, SalesterritoryhistoryRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SalesterritoryhistoryRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRow.scala index d6a7f7041c..8c55fcb971 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRow.scala @@ -15,6 +15,7 @@ import adventureworks.sales.salesterritory.SalesterritoryId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -82,4 +83,29 @@ object SalesterritoryhistoryRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SalesterritoryhistoryRow] = new Write[SalesterritoryhistoryRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (SalesterritoryId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.territoryid, x.startdate, x.enddate, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + SalesterritoryId.put.unsafeSetNonNullable(rs, i + 1, a.territoryid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 2, a.startdate) + TypoLocalDateTime.put.unsafeSetNullable(rs, i + 3, a.enddate) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + SalesterritoryId.put.unsafeUpdateNonNullable(ps, i + 1, a.territoryid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 2, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNullable(ps, i + 3, a.enddate) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala index baeb0de874..7da8e1b145 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala @@ -30,4 +30,7 @@ trait ShoppingcartitemRepo { def update: UpdateBuilder[ShoppingcartitemFields, ShoppingcartitemRow] def update(row: ShoppingcartitemRow): ConnectionIO[Boolean] def upsert(unsaved: ShoppingcartitemRow): ConnectionIO[ShoppingcartitemRow] + def upsertBatch(unsaved: List[ShoppingcartitemRow]): Stream[ConnectionIO, ShoppingcartitemRow] + /* NOTE: this functionality is not safe if you 
use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, ShoppingcartitemRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala index a97979b8b9..9de3c57e7a 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala @@ -10,6 +10,7 @@ package shoppingcartitem import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.production.product.ProductId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -136,4 +138,37 @@ class ShoppingcartitemRepoImpl extends ShoppingcartitemRepo { returning "shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated"::text, "modifieddate"::text """.query(using ShoppingcartitemRow.read).unique } + override def upsertBatch(unsaved: List[ShoppingcartitemRow]): Stream[ConnectionIO, ShoppingcartitemRow] = { + Update[ShoppingcartitemRow]( + s"""insert into sales.shoppingcartitem("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + values (?::int4,?,?::int4,?::int4,?::timestamp,?::timestamp) + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + returning "shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated"::text, "modifieddate"::text""" + )(using ShoppingcartitemRow.write) + .updateManyWithGeneratedKeys[ShoppingcartitemRow]("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate")(unsaved)(using catsStdInstancesForList, ShoppingcartitemRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShoppingcartitemRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table shoppingcartitem_TEMP (like sales.shoppingcartitem) on commit drop".update.run + _ <- new FragmentOps(sql"""copy shoppingcartitem_TEMP("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using ShoppingcartitemRow.text) + res <- sql"""insert into sales.shoppingcartitem("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + select * from shoppingcartitem_TEMP + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shoppingcartitem_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala index 836e569270..d3baca7a14 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala @@ -105,4 +105,23 @@ class ShoppingcartitemRepoMock(toRow: Function1[ShoppingcartitemRowUnsaved, Shop unsaved } } + override def upsertBatch(unsaved: List[ShoppingcartitemRow]): Stream[ConnectionIO, ShoppingcartitemRow] = { + Stream.emits { + unsaved.map { row => + map += (row.shoppingcartitemid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, ShoppingcartitemRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.shoppingcartitemid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRow.scala index babc2abdb2..e069c025a3 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRow.scala @@ -13,6 +13,7 @@ import adventureworks.production.product.ProductId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -79,4 +80,29 @@ object ShoppingcartitemRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[ShoppingcartitemRow] = new Write[ShoppingcartitemRow]( + puts = List((ShoppingcartitemId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.shoppingcartitemid, x.shoppingcartid, x.quantity, x.productid, x.datecreated, x.modifieddate), + unsafeSet = (rs, i, a) => { + ShoppingcartitemId.put.unsafeSetNonNullable(rs, i + 0, a.shoppingcartitemid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.shoppingcartid) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 2, a.quantity) + ProductId.put.unsafeSetNonNullable(rs, i + 3, a.productid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 4, a.datecreated) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + ShoppingcartitemId.put.unsafeUpdateNonNullable(ps, i + 0, a.shoppingcartitemid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.shoppingcartid) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.quantity) + ProductId.put.unsafeUpdateNonNullable(ps, i + 3, a.productid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 4, a.datecreated) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala index b82db537e3..40135e6863 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala @@ -30,4 +30,7 @@ trait SpecialofferRepo { def update: UpdateBuilder[SpecialofferFields, SpecialofferRow] def update(row: SpecialofferRow): ConnectionIO[Boolean] def upsert(unsaved: SpecialofferRow): ConnectionIO[SpecialofferRow] + def upsertBatch(unsaved: List[SpecialofferRow]): Stream[ConnectionIO, SpecialofferRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala index 581558fc84..d05cbbd801 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala @@ -10,6 +10,7 @@ package specialoffer import adventureworks.customtypes.Defaulted import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite @@ -17,6 +18,7 @@ import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment import doobie.util.meta.Meta +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -159,4 +161,47 @@ class SpecialofferRepoImpl extends SpecialofferRepo { returning "specialofferid", "description", "discountpct", "type", "category", "startdate"::text, "enddate"::text, "minqty", "maxqty", "rowguid", "modifieddate"::text """.query(using SpecialofferRow.read).unique } + override def upsertBatch(unsaved: List[SpecialofferRow]): Stream[ConnectionIO, SpecialofferRow] = { + Update[SpecialofferRow]( + s"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + values (?::int4,?,?::numeric,?,?,?::timestamp,?::timestamp,?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "specialofferid", "description", "discountpct", "type", "category", "startdate"::text, "enddate"::text, "minqty", "maxqty", "rowguid", "modifieddate"::text""" + )(using SpecialofferRow.write) + .updateManyWithGeneratedKeys[SpecialofferRow]("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SpecialofferRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table specialoffer_TEMP (like sales.specialoffer) on commit drop".update.run + _ <- new FragmentOps(sql"""copy specialoffer_TEMP("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SpecialofferRow.text) + res <- sql"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + select * from specialoffer_TEMP + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialoffer_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala index 94c9454314..67b03eaebc 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala @@ -105,4 +105,23 @@ class SpecialofferRepoMock(toRow: Function1[SpecialofferRowUnsaved, Specialoffer unsaved } } + override def upsertBatch(unsaved: List[SpecialofferRow]): Stream[ConnectionIO, SpecialofferRow] = { + Stream.emits { + unsaved.map { row => + map += (row.specialofferid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.specialofferid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRow.scala index 9a07cb5764..11ce559d41 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRow.scala @@ -13,6 +13,7 @@ import adventureworks.customtypes.TypoUUID import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import doobie.util.meta.Meta import io.circe.Decoder import io.circe.Encoder @@ -112,4 +113,44 @@ object SpecialofferRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SpecialofferRow] = new Write[SpecialofferRow]( + puts = List((SpecialofferId.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.ScalaBigDecimalMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (Meta.StringMeta.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.NoNulls), + (Meta.IntMeta.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.specialofferid, x.description, x.discountpct, x.`type`, x.category, x.startdate, x.enddate, x.minqty, x.maxqty, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SpecialofferId.put.unsafeSetNonNullable(rs, i + 0, a.specialofferid) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 1, a.description) + Meta.ScalaBigDecimalMeta.put.unsafeSetNonNullable(rs, i + 2, a.discountpct) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 3, a.`type`) + Meta.StringMeta.put.unsafeSetNonNullable(rs, i + 4, a.category) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.startdate) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 6, a.enddate) + Meta.IntMeta.put.unsafeSetNonNullable(rs, i + 7, a.minqty) + Meta.IntMeta.put.unsafeSetNullable(rs, i + 8, a.maxqty) + TypoUUID.put.unsafeSetNonNullable(rs, i + 9, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 10, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SpecialofferId.put.unsafeUpdateNonNullable(ps, i + 0, a.specialofferid) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 1, a.description) + Meta.ScalaBigDecimalMeta.put.unsafeUpdateNonNullable(ps, i + 2, a.discountpct) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 3, a.`type`) + Meta.StringMeta.put.unsafeUpdateNonNullable(ps, i + 4, a.category) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.startdate) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 6, a.enddate) + Meta.IntMeta.put.unsafeUpdateNonNullable(ps, i + 7, a.minqty) + Meta.IntMeta.put.unsafeUpdateNullable(ps, i + 8, a.maxqty) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 9, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 10, a.modifieddate) + } + ) } diff --git 
a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala index ee69f3980a..6d6773e0ae 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala @@ -30,4 +30,7 @@ trait SpecialofferproductRepo { def update: UpdateBuilder[SpecialofferproductFields, SpecialofferproductRow] def update(row: SpecialofferproductRow): ConnectionIO[Boolean] def upsert(unsaved: SpecialofferproductRow): ConnectionIO[SpecialofferproductRow] + def upsertBatch(unsaved: List[SpecialofferproductRow]): Stream[ConnectionIO, SpecialofferproductRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferproductRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala index d2f8afaca8..4282ff5b46 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala @@ -12,12 +12,14 @@ import adventureworks.customtypes.TypoLocalDateTime import adventureworks.customtypes.TypoUUID import adventureworks.production.product.ProductId import adventureworks.sales.specialoffer.SpecialofferId +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -135,4 +137,31 @@ class SpecialofferproductRepoImpl extends SpecialofferproductRepo { returning "specialofferid", "productid", "rowguid", "modifieddate"::text """.query(using SpecialofferproductRow.read).unique } + override def upsertBatch(unsaved: List[SpecialofferproductRow]): Stream[ConnectionIO, SpecialofferproductRow] = { + Update[SpecialofferproductRow]( + s"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", "modifieddate") + values (?::int4,?::int4,?::uuid,?::timestamp) + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "specialofferid", "productid", "rowguid", "modifieddate"::text""" + )(using SpecialofferproductRow.write) + .updateManyWithGeneratedKeys[SpecialofferproductRow]("specialofferid", "productid", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, SpecialofferproductRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferproductRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table specialofferproduct_TEMP (like sales.specialofferproduct) on commit drop".update.run + _ <- new FragmentOps(sql"""copy specialofferproduct_TEMP("specialofferid", "productid", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using SpecialofferproductRow.text) + res <- sql"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", "modifieddate") + select * from specialofferproduct_TEMP + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialofferproduct_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala index 80f3069eb3..2f672a6b1b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala @@ -105,4 +105,23 @@ class SpecialofferproductRepoMock(toRow: Function1[SpecialofferproductRowUnsaved unsaved } } + override def upsertBatch(unsaved: List[SpecialofferproductRow]): Stream[ConnectionIO, SpecialofferproductRow] = { + Stream.emits { + unsaved.map { row => + map += (row.compositeId -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, SpecialofferproductRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.compositeId -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRow.scala index e4a8cdebea..8e85aacfc6 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRow.scala @@ -15,6 +15,7 @@ import adventureworks.sales.specialoffer.SpecialofferId import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -68,4 +69,23 @@ object SpecialofferproductRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[SpecialofferproductRow] = new Write[SpecialofferproductRow]( + puts = List((SpecialofferId.put, Nullability.NoNulls), + (ProductId.put, Nullability.NoNulls), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.specialofferid, x.productid, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + SpecialofferId.put.unsafeSetNonNullable(rs, i + 0, a.specialofferid) + ProductId.put.unsafeSetNonNullable(rs, i + 1, a.productid) + TypoUUID.put.unsafeSetNonNullable(rs, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 3, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + SpecialofferId.put.unsafeUpdateNonNullable(ps, i + 0, a.specialofferid) + ProductId.put.unsafeUpdateNonNullable(ps, i + 1, a.productid) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 2, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 3, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala index 756b16f640..0a877126a1 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala @@ -31,4 +31,7 @@ trait StoreRepo { def update: UpdateBuilder[StoreFields, StoreRow] def update(row: StoreRow): ConnectionIO[Boolean] def upsert(unsaved: StoreRow): ConnectionIO[StoreRow] + def upsertBatch(unsaved: List[StoreRow]): Stream[ConnectionIO, StoreRow] + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: Stream[ConnectionIO, StoreRow], batchSize: Int = 10000): ConnectionIO[Int] } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala index 6f22544a27..10e3e43b4b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala @@ -13,12 +13,14 @@ import adventureworks.customtypes.TypoUUID import adventureworks.customtypes.TypoXml import adventureworks.person.businessentity.BusinessentityId import adventureworks.public.Name +import cats.instances.list.catsStdInstancesForList import doobie.free.connection.ConnectionIO import doobie.postgres.syntax.FragmentOps import doobie.syntax.SqlInterpolator.SingleFragment.fromWrite import doobie.syntax.string.toSqlInterpolator import doobie.util.Write import doobie.util.fragment.Fragment +import doobie.util.update.Update import fs2.Stream import typo.dsl.DeleteBuilder import typo.dsl.SelectBuilder @@ -132,4 +134,37 @@ class StoreRepoImpl extends StoreRepo { returning "businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate"::text """.query(using StoreRow.read).unique } + override def upsertBatch(unsaved: List[StoreRow]): Stream[ConnectionIO, StoreRow] = { + Update[StoreRow]( + s"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + values (?::int4,?::varchar,?::int4,?::xml,?::uuid,?::timestamp) + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + returning "businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate"::text""" + )(using StoreRow.write) + .updateManyWithGeneratedKeys[StoreRow]("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate")(unsaved)(using catsStdInstancesForList, StoreRow.read) + } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, StoreRow], batchSize: Int = 10000): ConnectionIO[Int] = { + for { + _ <- sql"create temporary table store_TEMP (like sales.store) on commit drop".update.run + _ <- new FragmentOps(sql"""copy store_TEMP("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") from stdin""").copyIn(unsaved, batchSize)(using StoreRow.text) + res <- sql"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + select * from store_TEMP + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table store_TEMP;""".update.run + } yield res + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala index 4ab86e04d6..49aed4f1fb 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala @@ -106,4 +106,23 @@ class StoreRepoMock(toRow: Function1[StoreRowUnsaved, StoreRow], unsaved } } + override def upsertBatch(unsaved: List[StoreRow]): Stream[ConnectionIO, StoreRow] = { + Stream.emits { + unsaved.map { row => + map += (row.businessentityid -> row) + row + } + } + } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: Stream[ConnectionIO, StoreRow], batchSize: Int = 10000): ConnectionIO[Int] = { + unsaved.compile.toList.map { rows => + var num = 0 + rows.foreach { row => + map += (row.businessentityid -> row) + num += 1 + } + num + } + } } diff --git a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRow.scala b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRow.scala index 394479923a..bd34ceac9b 100644 --- a/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRow.scala +++ b/typo-tester-doobie/generated-and-checked-in/adventureworks/sales/store/StoreRow.scala @@ -16,6 +16,7 @@ import adventureworks.public.Name import doobie.enumerated.Nullability import doobie.postgres.Text import doobie.util.Read +import doobie.util.Write import io.circe.Decoder import io.circe.Encoder import java.sql.ResultSet @@ -78,4 +79,29 @@ object StoreRow { sb.append(Text.DELIMETER) TypoLocalDateTime.text.unsafeEncode(row.modifieddate, sb) } + implicit lazy val write: Write[StoreRow] = new Write[StoreRow]( + puts = List((BusinessentityId.put, Nullability.NoNulls), + (Name.put, Nullability.NoNulls), + (BusinessentityId.put, Nullability.Nullable), + (TypoXml.put, Nullability.Nullable), + (TypoUUID.put, Nullability.NoNulls), + (TypoLocalDateTime.put, Nullability.NoNulls)), + toList = x => List(x.businessentityid, x.name, x.salespersonid, x.demographics, x.rowguid, x.modifieddate), + unsafeSet = (rs, i, a) => { + BusinessentityId.put.unsafeSetNonNullable(rs, i + 0, a.businessentityid) + Name.put.unsafeSetNonNullable(rs, i + 1, a.name) + BusinessentityId.put.unsafeSetNullable(rs, i + 2, a.salespersonid) + TypoXml.put.unsafeSetNullable(rs, i + 3, a.demographics) + TypoUUID.put.unsafeSetNonNullable(rs, i + 4, a.rowguid) + 
TypoLocalDateTime.put.unsafeSetNonNullable(rs, i + 5, a.modifieddate) + }, + unsafeUpdate = (ps, i, a) => { + BusinessentityId.put.unsafeUpdateNonNullable(ps, i + 0, a.businessentityid) + Name.put.unsafeUpdateNonNullable(ps, i + 1, a.name) + BusinessentityId.put.unsafeUpdateNullable(ps, i + 2, a.salespersonid) + TypoXml.put.unsafeUpdateNullable(ps, i + 3, a.demographics) + TypoUUID.put.unsafeUpdateNonNullable(ps, i + 4, a.rowguid) + TypoLocalDateTime.put.unsafeUpdateNonNullable(ps, i + 5, a.modifieddate) + } + ) } diff --git a/typo-tester-doobie/src/scala/adventureworks/production/product/RepoTest.scala b/typo-tester-doobie/src/scala/adventureworks/production/product/RepoTest.scala new file mode 100644 index 0000000000..cfb570dfe7 --- /dev/null +++ b/typo-tester-doobie/src/scala/adventureworks/production/product/RepoTest.scala @@ -0,0 +1,45 @@ +package adventureworks.production.product + +import adventureworks.customtypes.* +import adventureworks.production.unitmeasure.* +import adventureworks.public.Name +import adventureworks.{SnapshotTest, withConnection} +import org.scalatest.Assertion +import doobie.free.connection.delay + +class RepoTest extends SnapshotTest { + def upsertStreaming(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + for { + _ <- unitmeasureRepo.upsertStreaming(fs2.Stream(um1, um2)) + _ <- unitmeasureRepo.selectAll.compile.toList.map(all => assert(List(um1, um2) == all.sortBy(_.name))) + um1a = um1.copy(name = Name("name1a")) + um2a = um2.copy(name = Name("name2a")) + _ <- unitmeasureRepo.upsertStreaming(fs2.Stream(um1a, um2a)) + all <- unitmeasureRepo.selectAll.compile.toList + } yield assert(List(um1a, um2a) == all.sortBy(_.name)) + } + + def upsertBatch(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + for { + initial <- unitmeasureRepo.upsertBatch(List(um1, um2)).compile.toList + _ <- delay(assert(List(um1, um2) == initial.sortBy(_.name))) + um1a = um1.copy(name = Name("name1a")) + um2a = um2.copy(name = Name("name2a")) + returned <- unitmeasureRepo.upsertBatch(List(um1a, um2a)).compile.toList + _ <- delay(assert(List(um1a, um2a) == returned.sortBy(_.name))) + all <- unitmeasureRepo.selectAll.compile.toList + } yield assert(List(um1a, um2a) == all.sortBy(_.name)) + } + + test("upsertStreaming in-memory")(upsertStreaming(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now)))) + test("upsertStreaming pg")(upsertStreaming(new UnitmeasureRepoImpl)) + + test("upsertBatch in-memory")(upsertBatch(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now)))) + test("upsertBatch pg")(upsertBatch(new UnitmeasureRepoImpl)) +} diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala index df273e5148..a4ecb69a89 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala +++ 
b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepo.scala @@ -32,4 +32,7 @@ trait DepartmentRepo { def update: UpdateBuilder[DepartmentFields, DepartmentRow] def update(row: DepartmentRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: DepartmentRow): ZIO[ZConnection, Throwable, UpdateResult[DepartmentRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DepartmentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala index 58d12700aa..e675c78c1c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoImpl.scala @@ -115,4 +115,19 @@ class DepartmentRepoImpl extends DepartmentRepo { "modifieddate" = EXCLUDED."modifieddate" returning "departmentid", "name", "groupname", "modifieddate"::text""".insertReturning(using DepartmentRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DepartmentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table department_TEMP (like humanresources.department) on commit drop".execute + val copied = streamingInsert(s"""copy department_TEMP("departmentid", "name", "groupname", "modifieddate") from stdin""", batchSize, unsaved)(DepartmentRow.text) + val merged = sql"""insert into humanresources.department("departmentid", "name", "groupname", "modifieddate") + select * from department_TEMP + on conflict ("departmentid") + do update set + "name" = EXCLUDED."name", + "groupname" = EXCLUDED."groupname", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table department_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala index 6cae265935..023e5870da 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/department/DepartmentRepoMock.scala @@ -104,4 +104,13 @@ class DepartmentRepoMock(toRow: Function1[DepartmentRowUnsaved, DepartmentRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DepartmentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.departmentid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala index 10c985d53b..d835749ec1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepo.scala @@ -33,4 +33,7 @@ trait EmployeeRepo { def update: UpdateBuilder[EmployeeFields, EmployeeRow] def update(row: EmployeeRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: EmployeeRow): ZIO[ZConnection, Throwable, UpdateResult[EmployeeRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala index c89d7b34f0..cf8250fbd9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoImpl.scala @@ -179,4 +179,30 @@ class EmployeeRepoImpl extends EmployeeRepo { "organizationnode" = EXCLUDED."organizationnode" returning "businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate"::text, "maritalstatus", "gender", "hiredate"::text, "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate"::text, "organizationnode"""".insertReturning(using EmployeeRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table employee_TEMP (like humanresources.employee) on commit drop".execute + val copied = streamingInsert(s"""copy employee_TEMP("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") from stdin""", batchSize, unsaved)(EmployeeRow.text) + val merged = sql"""insert into humanresources.employee("businessentityid", "nationalidnumber", "loginid", "jobtitle", "birthdate", "maritalstatus", "gender", "hiredate", "salariedflag", "vacationhours", "sickleavehours", "currentflag", "rowguid", "modifieddate", "organizationnode") + select * from employee_TEMP + on conflict ("businessentityid") + do update set + "nationalidnumber" = EXCLUDED."nationalidnumber", + "loginid" = EXCLUDED."loginid", + "jobtitle" = EXCLUDED."jobtitle", + "birthdate" = EXCLUDED."birthdate", + "maritalstatus" = EXCLUDED."maritalstatus", + "gender" = EXCLUDED."gender", + "hiredate" = EXCLUDED."hiredate", + "salariedflag" = EXCLUDED."salariedflag", + "vacationhours" = EXCLUDED."vacationhours", + "sickleavehours" = EXCLUDED."sickleavehours", + "currentflag" = EXCLUDED."currentflag", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate", + "organizationnode" = EXCLUDED."organizationnode" + ; + drop table employee_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala index 2ba6e9ec16..b03bd1df65 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employee/EmployeeRepoMock.scala @@ -105,4 +105,13 @@ class EmployeeRepoMock(toRow: Function1[EmployeeRowUnsaved, EmployeeRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala index 11c6bd2c2e..21ebdef468 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepo.scala @@ -32,4 +32,7 @@ trait EmployeedepartmenthistoryRepo { def update: UpdateBuilder[EmployeedepartmenthistoryFields, EmployeedepartmenthistoryRow] def update(row: EmployeedepartmenthistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: EmployeedepartmenthistoryRow): ZIO[ZConnection, Throwable, UpdateResult[EmployeedepartmenthistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala index 1d659a7a79..3063156219 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoImpl.scala @@ -136,4 +136,18 @@ class EmployeedepartmenthistoryRepoImpl extends EmployeedepartmenthistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "departmentid", "shiftid", "startdate"::text, "enddate"::text, "modifieddate"::text""".insertReturning(using EmployeedepartmenthistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table employeedepartmenthistory_TEMP (like humanresources.employeedepartmenthistory) on commit drop".execute + val copied = streamingInsert(s"""copy employeedepartmenthistory_TEMP("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") from stdin""", batchSize, unsaved)(EmployeedepartmenthistoryRow.text) + val merged = sql"""insert into humanresources.employeedepartmenthistory("businessentityid", "departmentid", "shiftid", "startdate", "enddate", "modifieddate") + select * from employeedepartmenthistory_TEMP + on conflict ("businessentityid", "startdate", "departmentid", "shiftid") + do update set + "enddate" = EXCLUDED."enddate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeedepartmenthistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala index 22d4573729..8c55f8423a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeedepartmenthistory/EmployeedepartmenthistoryRepoMock.scala @@ -104,4 +104,13 @@ class EmployeedepartmenthistoryRepoMock(toRow: Function1[Employeedepartmenthisto UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeedepartmenthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala index a2ebfdd44e..35ed6d5506 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepo.scala @@ -32,4 +32,7 @@ trait EmployeepayhistoryRepo { def update: UpdateBuilder[EmployeepayhistoryFields, EmployeepayhistoryRow] def update(row: EmployeepayhistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: EmployeepayhistoryRow): ZIO[ZConnection, Throwable, UpdateResult[EmployeepayhistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeepayhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala index aa095755eb..fc44c01f7a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoImpl.scala @@ -130,4 +130,19 @@ class EmployeepayhistoryRepoImpl extends EmployeepayhistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "ratechangedate"::text, "rate", "payfrequency", "modifieddate"::text""".insertReturning(using EmployeepayhistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeepayhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table employeepayhistory_TEMP (like humanresources.employeepayhistory) on commit drop".execute + val copied = streamingInsert(s"""copy employeepayhistory_TEMP("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") from stdin""", batchSize, unsaved)(EmployeepayhistoryRow.text) + val merged = sql"""insert into humanresources.employeepayhistory("businessentityid", "ratechangedate", "rate", "payfrequency", "modifieddate") + select * from employeepayhistory_TEMP + on conflict ("businessentityid", "ratechangedate") + do update set + "rate" = EXCLUDED."rate", + "payfrequency" = EXCLUDED."payfrequency", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table employeepayhistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala index beb692a78c..b11f1600c1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/employeepayhistory/EmployeepayhistoryRepoMock.scala @@ -104,4 +104,13 @@ class EmployeepayhistoryRepoMock(toRow: Function1[EmployeepayhistoryRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmployeepayhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala index d5e2d8f49d..3100526f13 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepo.scala @@ -32,4 +32,7 @@ trait JobcandidateRepo { def update: UpdateBuilder[JobcandidateFields, JobcandidateRow] def update(row: JobcandidateRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: JobcandidateRow): ZIO[ZConnection, Throwable, UpdateResult[JobcandidateRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, JobcandidateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala index df014bfa1a..9b13ee47f4 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoImpl.scala @@ -117,4 +117,19 @@ class JobcandidateRepoImpl extends JobcandidateRepo { "modifieddate" = EXCLUDED."modifieddate" returning "jobcandidateid", "businessentityid", "resume", "modifieddate"::text""".insertReturning(using JobcandidateRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, JobcandidateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table jobcandidate_TEMP (like humanresources.jobcandidate) on commit drop".execute + val copied = streamingInsert(s"""copy jobcandidate_TEMP("jobcandidateid", "businessentityid", "resume", "modifieddate") from stdin""", batchSize, unsaved)(JobcandidateRow.text) + val merged = sql"""insert into humanresources.jobcandidate("jobcandidateid", "businessentityid", "resume", "modifieddate") + select * from jobcandidate_TEMP + on conflict ("jobcandidateid") + do update set + "businessentityid" = EXCLUDED."businessentityid", + "resume" = EXCLUDED."resume", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table jobcandidate_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala index a4bd10df6b..ace2940721 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/jobcandidate/JobcandidateRepoMock.scala @@ -104,4 +104,13 @@ class JobcandidateRepoMock(toRow: Function1[JobcandidateRowUnsaved, Jobcandidate UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, JobcandidateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.jobcandidateid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala index 466ee97664..067e3a94ff 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepo.scala @@ -32,4 +32,7 @@ trait ShiftRepo { def update: UpdateBuilder[ShiftFields, ShiftRow] def update(row: ShiftRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ShiftRow): ZIO[ZConnection, Throwable, UpdateResult[ShiftRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShiftRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala index b96732646f..e947224528 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoImpl.scala @@ -120,4 +120,20 @@ class ShiftRepoImpl extends ShiftRepo { "modifieddate" = EXCLUDED."modifieddate" returning "shiftid", "name", "starttime"::text, "endtime"::text, "modifieddate"::text""".insertReturning(using ShiftRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShiftRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table shift_TEMP (like humanresources.shift) on commit drop".execute + val copied = streamingInsert(s"""copy shift_TEMP("shiftid", "name", "starttime", "endtime", "modifieddate") from stdin""", batchSize, unsaved)(ShiftRow.text) + val merged = sql"""insert into humanresources.shift("shiftid", "name", "starttime", "endtime", "modifieddate") + select * from shift_TEMP + on conflict ("shiftid") + do update set + "name" = EXCLUDED."name", + "starttime" = EXCLUDED."starttime", + "endtime" = EXCLUDED."endtime", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shift_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala index 254878ca39..2bf4af7fb4 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/humanresources/shift/ShiftRepoMock.scala @@ -104,4 +104,13 @@ class ShiftRepoMock(toRow: Function1[ShiftRowUnsaved, ShiftRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShiftRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.shiftid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala new file mode 100644 index 0000000000..41146ea388 --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CardinalNumber.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.cardinal_number` + * Constraint: CHECK ((VALUE >= 0)) + */ +case class CardinalNumber(value: Int) +object CardinalNumber { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[CardinalNumber]] = adventureworks.IntArrayDecoder.map(_.map(CardinalNumber.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[CardinalNumber]] = adventureworks.IntArrayEncoder.contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[CardinalNumber]] = adventureworks.IntArraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CardinalNumber, Int] = Bijection[CardinalNumber, Int](_.value)(CardinalNumber.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[CardinalNumber] = JdbcDecoder.intDecoder.map(CardinalNumber.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[CardinalNumber] = JdbcEncoder.intEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[CardinalNumber] = JsonDecoder.int.map(CardinalNumber.apply) + implicit lazy val jsonEncoder: JsonEncoder[CardinalNumber] = JsonEncoder.int.contramap(_.value) + implicit lazy val ordering: Ordering[CardinalNumber] = Ordering.by(_.value) + implicit lazy val pgType: PGType[CardinalNumber] = PGType.instance(""""information_schema"."cardinal_number"""", Types.OTHER) + implicit lazy val setter: Setter[CardinalNumber] = Setter.intSetter.contramap(_.value) + implicit lazy val text: Text[CardinalNumber] = new Text[CardinalNumber] { + override def unsafeEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CardinalNumber, sb: StringBuilder) = Text.intInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala new file mode 100644 index 0000000000..f067aa4bda --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/CharacterData.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.character_data` + * No constraint + */ +case class CharacterData(value: String) +object CharacterData { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[CharacterData]] = adventureworks.StringArrayDecoder.map(_.map(CharacterData.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[CharacterData]] = adventureworks.StringArrayEncoder.contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[CharacterData]] = adventureworks.StringArraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[CharacterData, String] = Bijection[CharacterData, String](_.value)(CharacterData.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[CharacterData] = JdbcDecoder.stringDecoder.map(CharacterData.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[CharacterData] = JdbcEncoder.stringEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[CharacterData] = JsonDecoder.string.map(CharacterData.apply) + implicit lazy val jsonEncoder: JsonEncoder[CharacterData] = JsonEncoder.string.contramap(_.value) + implicit lazy val ordering: Ordering[CharacterData] = Ordering.by(_.value) + implicit lazy val pgType: PGType[CharacterData] = PGType.instance(""""information_schema"."character_data"""", Types.OTHER) + implicit lazy val setter: Setter[CharacterData] = Setter.stringSetter.contramap(_.value) + implicit lazy val text: Text[CharacterData] = new Text[CharacterData] { + override def unsafeEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: CharacterData, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala new file mode 100644 index 0000000000..652adfa158 --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/SqlIdentifier.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.sql_identifier` + * No constraint + */ +case class SqlIdentifier(value: String) +object SqlIdentifier { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[SqlIdentifier]] = adventureworks.StringArrayDecoder.map(_.map(SqlIdentifier.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[SqlIdentifier]] = adventureworks.StringArrayEncoder.contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[SqlIdentifier]] = adventureworks.StringArraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[SqlIdentifier, String] = Bijection[SqlIdentifier, String](_.value)(SqlIdentifier.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[SqlIdentifier] = JdbcDecoder.stringDecoder.map(SqlIdentifier.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[SqlIdentifier] = JdbcEncoder.stringEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[SqlIdentifier] = JsonDecoder.string.map(SqlIdentifier.apply) + implicit lazy val jsonEncoder: JsonEncoder[SqlIdentifier] = JsonEncoder.string.contramap(_.value) + implicit lazy val ordering: Ordering[SqlIdentifier] = Ordering.by(_.value) + implicit lazy val pgType: PGType[SqlIdentifier] = PGType.instance(""""information_schema"."sql_identifier"""", Types.OTHER) + implicit lazy val setter: Setter[SqlIdentifier] = Setter.stringSetter.contramap(_.value) + implicit lazy val text: Text[SqlIdentifier] = new Text[SqlIdentifier] { + override def unsafeEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: SqlIdentifier, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala new file mode 100644 index 0000000000..0a53c42520 --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/TimeStamp.scala @@ -0,0 +1,39 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import adventureworks.customtypes.TypoInstant +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.time_stamp` + * No constraint + */ +case class TimeStamp(value: TypoInstant) +object TimeStamp { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[TimeStamp]] = JdbcDecoder[Array[TypoInstant]].map(_.map(TimeStamp.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[TimeStamp]] = JdbcEncoder[Array[TypoInstant]].contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[TimeStamp]] = TypoInstant.arraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[TimeStamp, TypoInstant] = Bijection[TimeStamp, TypoInstant](_.value)(TimeStamp.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[TimeStamp] = TypoInstant.jdbcDecoder.map(TimeStamp.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[TimeStamp] = TypoInstant.jdbcEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[TimeStamp] = TypoInstant.jsonDecoder.map(TimeStamp.apply) + implicit lazy val jsonEncoder: JsonEncoder[TimeStamp] = TypoInstant.jsonEncoder.contramap(_.value) + implicit def ordering(implicit O0: Ordering[TypoInstant]): Ordering[TimeStamp] = Ordering.by(_.value) + implicit lazy val pgType: PGType[TimeStamp] = PGType.instance(""""information_schema"."time_stamp"""", Types.OTHER) + implicit lazy val setter: Setter[TimeStamp] = TypoInstant.setter.contramap(_.value) + implicit lazy val text: Text[TimeStamp] = new Text[TimeStamp] { + override def unsafeEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: TimeStamp, sb: StringBuilder) = TypoInstant.text.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala new file mode 100644 index 0000000000..97578ef7a6 --- /dev/null +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/information_schema/YesOrNo.scala @@ -0,0 +1,38 @@ +/** + * File has been automatically generated by `typo`. + * + * IF YOU CHANGE THIS FILE YOUR CHANGES WILL BE OVERWRITTEN. 
+ */ +package adventureworks +package information_schema + +import java.sql.Types +import typo.dsl.Bijection +import typo.dsl.PGType +import zio.jdbc.JdbcDecoder +import zio.jdbc.JdbcEncoder +import zio.jdbc.SqlFragment.Setter +import zio.json.JsonDecoder +import zio.json.JsonEncoder + +/** Domain `information_schema.yes_or_no` + * Constraint: CHECK (((VALUE)::text = ANY ((ARRAY['YES'::character varying, 'NO'::character varying])::text[]))) + */ +case class YesOrNo(value: String) +object YesOrNo { + implicit lazy val arrayJdbcDecoder: JdbcDecoder[Array[YesOrNo]] = adventureworks.StringArrayDecoder.map(_.map(YesOrNo.apply)) + implicit lazy val arrayJdbcEncoder: JdbcEncoder[Array[YesOrNo]] = adventureworks.StringArrayEncoder.contramap(_.map(_.value)) + implicit lazy val arraySetter: Setter[Array[YesOrNo]] = adventureworks.StringArraySetter.contramap(_.map(_.value)) + implicit lazy val bijection: Bijection[YesOrNo, String] = Bijection[YesOrNo, String](_.value)(YesOrNo.apply) + implicit lazy val jdbcDecoder: JdbcDecoder[YesOrNo] = JdbcDecoder.stringDecoder.map(YesOrNo.apply) + implicit lazy val jdbcEncoder: JdbcEncoder[YesOrNo] = JdbcEncoder.stringEncoder.contramap(_.value) + implicit lazy val jsonDecoder: JsonDecoder[YesOrNo] = JsonDecoder.string.map(YesOrNo.apply) + implicit lazy val jsonEncoder: JsonEncoder[YesOrNo] = JsonEncoder.string.contramap(_.value) + implicit lazy val ordering: Ordering[YesOrNo] = Ordering.by(_.value) + implicit lazy val pgType: PGType[YesOrNo] = PGType.instance(""""information_schema"."yes_or_no"""", Types.OTHER) + implicit lazy val setter: Setter[YesOrNo] = Setter.stringSetter.contramap(_.value) + implicit lazy val text: Text[YesOrNo] = new Text[YesOrNo] { + override def unsafeEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeEncode(v.value, sb) + override def unsafeArrayEncode(v: YesOrNo, sb: StringBuilder) = Text.stringInstance.unsafeArrayEncode(v.value, sb) + } +} \ No newline at end of file diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala index 21e9897060..1a973d39bf 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepo.scala @@ -32,4 +32,7 @@ trait AddressRepo { def update: UpdateBuilder[AddressFields, AddressRow] def update(row: AddressRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: AddressRow): ZIO[ZConnection, Throwable, UpdateResult[AddressRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala index 100b2fa2a2..e7db77f62d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoImpl.scala @@ -141,4 +141,24 @@ class AddressRepoImpl extends AddressRepo { "modifieddate" = EXCLUDED."modifieddate" returning "addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate"::text""".insertReturning(using AddressRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table address_TEMP (like person.address) on commit drop".execute + val copied = streamingInsert(s"""copy address_TEMP("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(AddressRow.text) + val merged = sql"""insert into person.address("addressid", "addressline1", "addressline2", "city", "stateprovinceid", "postalcode", "spatiallocation", "rowguid", "modifieddate") + select * from address_TEMP + on conflict ("addressid") + do update set + "addressline1" = EXCLUDED."addressline1", + "addressline2" = EXCLUDED."addressline2", + "city" = EXCLUDED."city", + "stateprovinceid" = EXCLUDED."stateprovinceid", + "postalcode" = EXCLUDED."postalcode", + "spatiallocation" = EXCLUDED."spatiallocation", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table address_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala index ea74ea3f2f..f1306654ec 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/address/AddressRepoMock.scala @@ -104,4 +104,13 @@ class AddressRepoMock(toRow: Function1[AddressRowUnsaved, AddressRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.addressid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala index 970a1ee967..a3f10a481c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepo.scala @@ -32,4 +32,7 @@ trait AddresstypeRepo { def update: UpdateBuilder[AddresstypeFields, AddresstypeRow] def update(row: AddresstypeRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: AddresstypeRow): ZIO[ZConnection, Throwable, UpdateResult[AddresstypeRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddresstypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala index 6be1ed7a79..bf63e3d7a8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoImpl.scala @@ -119,4 +119,19 @@ class AddresstypeRepoImpl extends AddresstypeRepo { "modifieddate" = EXCLUDED."modifieddate" returning "addresstypeid", "name", "rowguid", "modifieddate"::text""".insertReturning(using AddresstypeRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddresstypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table addresstype_TEMP (like person.addresstype) on commit drop".execute + val copied = streamingInsert(s"""copy addresstype_TEMP("addresstypeid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(AddresstypeRow.text) + val merged = sql"""insert into person.addresstype("addresstypeid", "name", "rowguid", "modifieddate") + select * from addresstype_TEMP + on conflict ("addresstypeid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table addresstype_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala index 3d136ee015..2e98a7a641 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/addresstype/AddresstypeRepoMock.scala @@ -104,4 +104,13 @@ class AddresstypeRepoMock(toRow: Function1[AddresstypeRowUnsaved, AddresstypeRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, AddresstypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.addresstypeid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala index 8d5723e576..39fec6772d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepo.scala @@ -32,4 +32,7 @@ trait BusinessentityRepo { def update: UpdateBuilder[BusinessentityFields, BusinessentityRow] def update(row: BusinessentityRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: BusinessentityRow): ZIO[ZConnection, Throwable, UpdateResult[BusinessentityRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala index a909211f57..d4dba791ac 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoImpl.scala @@ -114,4 +114,18 @@ class BusinessentityRepoImpl extends BusinessentityRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "rowguid", "modifieddate"::text""".insertReturning(using BusinessentityRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table businessentity_TEMP (like person.businessentity) on commit drop".execute + val copied = streamingInsert(s"""copy businessentity_TEMP("businessentityid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentityRow.text) + val merged = sql"""insert into person.businessentity("businessentityid", "rowguid", "modifieddate") + select * from businessentity_TEMP + on conflict ("businessentityid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentity_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala index a67ee9642f..d9d4f70ca8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentity/BusinessentityRepoMock.scala @@ -104,4 +104,13 @@ class BusinessentityRepoMock(toRow: Function1[BusinessentityRowUnsaved, Business UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala index de1c702ff3..60d62dbf15 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepo.scala @@ -32,4 +32,7 @@ trait BusinessentityaddressRepo { def update: UpdateBuilder[BusinessentityaddressFields, BusinessentityaddressRow] def update(row: BusinessentityaddressRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: BusinessentityaddressRow): ZIO[ZConnection, Throwable, UpdateResult[BusinessentityaddressRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala index 9d54f614f0..b4dd567bda 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoImpl.scala @@ -134,4 +134,18 @@ class BusinessentityaddressRepoImpl extends BusinessentityaddressRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate"::text""".insertReturning(using BusinessentityaddressRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table businessentityaddress_TEMP (like person.businessentityaddress) on commit drop".execute + val copied = streamingInsert(s"""copy businessentityaddress_TEMP("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentityaddressRow.text) + val merged = sql"""insert into person.businessentityaddress("businessentityid", "addressid", "addresstypeid", "rowguid", "modifieddate") + select * from businessentityaddress_TEMP + on conflict ("businessentityid", "addressid", "addresstypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentityaddress_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala index 63abb1c795..05fef776cd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentityaddress/BusinessentityaddressRepoMock.scala @@ -104,4 +104,13 @@ class BusinessentityaddressRepoMock(toRow: Function1[BusinessentityaddressRowUns UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentityaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala index c4cef12533..b0c2013a92 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepo.scala @@ -32,4 +32,7 @@ trait BusinessentitycontactRepo { def update: UpdateBuilder[BusinessentitycontactFields, BusinessentitycontactRow] def update(row: BusinessentitycontactRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: BusinessentitycontactRow): ZIO[ZConnection, Throwable, UpdateResult[BusinessentitycontactRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentitycontactRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala index e74810ffea..a14c7a4076 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoImpl.scala @@ -133,4 +133,18 @@ class BusinessentitycontactRepoImpl extends BusinessentitycontactRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate"::text""".insertReturning(using BusinessentitycontactRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentitycontactRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table businessentitycontact_TEMP (like person.businessentitycontact) on commit drop".execute + val copied = streamingInsert(s"""copy businessentitycontact_TEMP("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(BusinessentitycontactRow.text) + val merged = sql"""insert into person.businessentitycontact("businessentityid", "personid", "contacttypeid", "rowguid", "modifieddate") + select * from businessentitycontact_TEMP + on conflict ("businessentityid", "personid", "contacttypeid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table businessentitycontact_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala index cee9eed9ad..f22f3b46e3 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/businessentitycontact/BusinessentitycontactRepoMock.scala @@ -104,4 +104,13 @@ class BusinessentitycontactRepoMock(toRow: Function1[BusinessentitycontactRowUns UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BusinessentitycontactRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala index 6cefc84874..fc4ea7a35a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepo.scala @@ -32,4 +32,7 @@ trait ContacttypeRepo { def update: UpdateBuilder[ContacttypeFields, ContacttypeRow] def update(row: ContacttypeRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ContacttypeRow): ZIO[ZConnection, Throwable, UpdateResult[ContacttypeRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ContacttypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala index a8d9879bc5..fa062b1313 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoImpl.scala @@ -111,4 +111,18 @@ class ContacttypeRepoImpl extends ContacttypeRepo { "modifieddate" = EXCLUDED."modifieddate" returning "contacttypeid", "name", "modifieddate"::text""".insertReturning(using ContacttypeRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ContacttypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table contacttype_TEMP (like person.contacttype) on commit drop".execute + val copied = streamingInsert(s"""copy contacttype_TEMP("contacttypeid", "name", "modifieddate") from stdin""", batchSize, unsaved)(ContacttypeRow.text) + val merged = sql"""insert into person.contacttype("contacttypeid", "name", "modifieddate") + select * from contacttype_TEMP + on conflict ("contacttypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table contacttype_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala index f440033b12..5c02e6a97f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/contacttype/ContacttypeRepoMock.scala @@ -104,4 +104,13 @@ class ContacttypeRepoMock(toRow: Function1[ContacttypeRowUnsaved, ContacttypeRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ContacttypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.contacttypeid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala index ba9ccc582d..ad3bb8eb31 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepo.scala @@ -32,4 +32,7 @@ trait CountryregionRepo { def update: UpdateBuilder[CountryregionFields, CountryregionRow] def update(row: CountryregionRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CountryregionRow): ZIO[ZConnection, Throwable, UpdateResult[CountryregionRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala index 81f25e3a94..d03ac9a2e8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoImpl.scala @@ -108,4 +108,18 @@ class CountryregionRepoImpl extends CountryregionRepo { "modifieddate" = EXCLUDED."modifieddate" returning "countryregioncode", "name", "modifieddate"::text""".insertReturning(using CountryregionRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table countryregion_TEMP (like person.countryregion) on commit drop".execute + val copied = streamingInsert(s"""copy countryregion_TEMP("countryregioncode", "name", "modifieddate") from stdin""", batchSize, unsaved)(CountryregionRow.text) + val merged = sql"""insert into person.countryregion("countryregioncode", "name", "modifieddate") + select * from countryregion_TEMP + on conflict ("countryregioncode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregion_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala index cdc5ee6f96..bce0c417c5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/countryregion/CountryregionRepoMock.scala @@ -104,4 +104,13 @@ class CountryregionRepoMock(toRow: Function1[CountryregionRowUnsaved, Countryreg UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.countryregioncode -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala index f25412f969..e6b7b53ede 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepo.scala @@ -32,4 +32,7 @@ trait EmailaddressRepo { def update: UpdateBuilder[EmailaddressFields, EmailaddressRow] def update(row: EmailaddressRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: EmailaddressRow): ZIO[ZConnection, Throwable, UpdateResult[EmailaddressRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmailaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala index 7bf819ab3e..d06be6ccc5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoImpl.scala @@ -136,4 +136,19 @@ class EmailaddressRepoImpl extends EmailaddressRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate"::text""".insertReturning(using EmailaddressRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmailaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table emailaddress_TEMP (like person.emailaddress) on commit drop".execute + val copied = streamingInsert(s"""copy emailaddress_TEMP("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(EmailaddressRow.text) + val merged = sql"""insert into person.emailaddress("businessentityid", "emailaddressid", "emailaddress", "rowguid", "modifieddate") + select * from emailaddress_TEMP + on conflict ("businessentityid", "emailaddressid") + do update set + "emailaddress" = EXCLUDED."emailaddress", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table emailaddress_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala index 41445281d7..4b3f9bc475 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/emailaddress/EmailaddressRepoMock.scala @@ -104,4 +104,13 @@ class EmailaddressRepoMock(toRow: Function1[EmailaddressRowUnsaved, Emailaddress UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, EmailaddressRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala index 447152fff6..e65fa06905 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepo.scala @@ -33,4 +33,7 @@ trait PasswordRepo { def update: UpdateBuilder[PasswordFields, PasswordRow] def update(row: PasswordRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PasswordRow): ZIO[ZConnection, Throwable, UpdateResult[PasswordRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PasswordRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala index aaeaf9d06a..f9acc195d6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoImpl.scala @@ -121,4 +121,20 @@ class PasswordRepoImpl extends PasswordRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate"::text""".insertReturning(using PasswordRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PasswordRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table password_TEMP (like person.password) on commit drop".execute + val copied = streamingInsert(s"""copy password_TEMP("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(PasswordRow.text) + val merged = sql"""insert into person.password("businessentityid", "passwordhash", "passwordsalt", "rowguid", "modifieddate") + select * from password_TEMP + on conflict ("businessentityid") + do update set + "passwordhash" = EXCLUDED."passwordhash", + "passwordsalt" = EXCLUDED."passwordsalt", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table password_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala index 46915294ca..a6ba35fd53 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/password/PasswordRepoMock.scala @@ -105,4 +105,13 @@ class PasswordRepoMock(toRow: Function1[PasswordRowUnsaved, PasswordRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PasswordRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala index e431bc1f12..5d880115af 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepo.scala @@ -33,4 +33,7 @@ trait PersonRepo { def update: UpdateBuilder[PersonFields, PersonRow] def update(row: PersonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonRow): ZIO[ZConnection, Throwable, UpdateResult[PersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala index 1b594c84d9..7f73c6e2ec 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoImpl.scala @@ -163,4 +163,28 @@ class PersonRepoImpl extends PersonRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate"::text""".insertReturning(using PersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table person_TEMP (like person.person) on commit drop".execute + val copied = streamingInsert(s"""copy person_TEMP("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(PersonRow.text) + val merged = sql"""insert into person.person("businessentityid", "persontype", "namestyle", "title", "firstname", "middlename", "lastname", "suffix", "emailpromotion", "additionalcontactinfo", "demographics", "rowguid", "modifieddate") + select * from person_TEMP + on conflict ("businessentityid") + do update set + "persontype" = EXCLUDED."persontype", + "namestyle" = EXCLUDED."namestyle", + "title" = EXCLUDED."title", + "firstname" = EXCLUDED."firstname", + "middlename" = EXCLUDED."middlename", + "lastname" = EXCLUDED."lastname", + "suffix" = EXCLUDED."suffix", + "emailpromotion" = EXCLUDED."emailpromotion", + "additionalcontactinfo" = EXCLUDED."additionalcontactinfo", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table person_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala index 5a3c9cec23..2eeb49c51b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/person/PersonRepoMock.scala @@ -105,4 +105,13 @@ class PersonRepoMock(toRow: Function1[PersonRowUnsaved, PersonRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala index bc04cd72a3..650a141fdb 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepo.scala @@ -32,4 +32,7 @@ trait PersonphoneRepo { def update: UpdateBuilder[PersonphoneFields, PersonphoneRow] def update(row: PersonphoneRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersonphoneRow): ZIO[ZConnection, Throwable, UpdateResult[PersonphoneRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonphoneRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala index 8abf56aada..827112f157 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoImpl.scala @@ -126,4 +126,17 @@ class PersonphoneRepoImpl extends PersonphoneRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate"::text""".insertReturning(using PersonphoneRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonphoneRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table personphone_TEMP (like person.personphone) on commit drop".execute + val copied = streamingInsert(s"""copy personphone_TEMP("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") from stdin""", batchSize, unsaved)(PersonphoneRow.text) + val merged = sql"""insert into person.personphone("businessentityid", "phonenumber", "phonenumbertypeid", "modifieddate") + select * from personphone_TEMP + on conflict ("businessentityid", "phonenumber", "phonenumbertypeid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personphone_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala index fccc49d199..17c15b430e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/personphone/PersonphoneRepoMock.scala @@ -104,4 +104,13 @@ class PersonphoneRepoMock(toRow: Function1[PersonphoneRowUnsaved, PersonphoneRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersonphoneRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala index 426ed087e7..2710eb9a93 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepo.scala @@ -32,4 +32,7 @@ trait PhonenumbertypeRepo { def update: UpdateBuilder[PhonenumbertypeFields, PhonenumbertypeRow] def update(row: PhonenumbertypeRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PhonenumbertypeRow): ZIO[ZConnection, Throwable, UpdateResult[PhonenumbertypeRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PhonenumbertypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala index 63efd0a14d..e6aa4827cb 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoImpl.scala @@ -111,4 +111,18 @@ class PhonenumbertypeRepoImpl extends PhonenumbertypeRepo { "modifieddate" = EXCLUDED."modifieddate" returning "phonenumbertypeid", "name", "modifieddate"::text""".insertReturning(using PhonenumbertypeRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PhonenumbertypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table phonenumbertype_TEMP (like person.phonenumbertype) on commit drop".execute + val copied = streamingInsert(s"""copy phonenumbertype_TEMP("phonenumbertypeid", "name", "modifieddate") from stdin""", batchSize, unsaved)(PhonenumbertypeRow.text) + val merged = sql"""insert into person.phonenumbertype("phonenumbertypeid", "name", "modifieddate") + select * from phonenumbertype_TEMP + on conflict ("phonenumbertypeid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table phonenumbertype_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala index dbe7dc7f10..2b95303c21 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/phonenumbertype/PhonenumbertypeRepoMock.scala @@ -104,4 +104,13 @@ class PhonenumbertypeRepoMock(toRow: Function1[PhonenumbertypeRowUnsaved, Phonen UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PhonenumbertypeRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.phonenumbertypeid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala index 5e3a26c693..02ef257566 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepo.scala @@ -32,4 +32,7 @@ trait StateprovinceRepo { def update: UpdateBuilder[StateprovinceFields, StateprovinceRow] def update(row: StateprovinceRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: StateprovinceRow): ZIO[ZConnection, Throwable, UpdateResult[StateprovinceRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StateprovinceRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala index fc885ba9ab..0521b16016 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoImpl.scala @@ -142,4 +142,23 @@ class StateprovinceRepoImpl extends StateprovinceRepo { "modifieddate" = EXCLUDED."modifieddate" returning "stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate"::text""".insertReturning(using StateprovinceRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StateprovinceRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table stateprovince_TEMP (like person.stateprovince) on commit drop".execute + val copied = streamingInsert(s"""copy stateprovince_TEMP("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(StateprovinceRow.text) + val merged = sql"""insert into person.stateprovince("stateprovinceid", "stateprovincecode", "countryregioncode", "isonlystateprovinceflag", "name", "territoryid", "rowguid", "modifieddate") + select * from stateprovince_TEMP + on conflict ("stateprovinceid") + do update set + "stateprovincecode" = EXCLUDED."stateprovincecode", + "countryregioncode" = EXCLUDED."countryregioncode", + "isonlystateprovinceflag" = EXCLUDED."isonlystateprovinceflag", + "name" = EXCLUDED."name", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table stateprovince_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala index e7d43ae06a..18d3050e9c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/person/stateprovince/StateprovinceRepoMock.scala @@ -104,4 +104,13 @@ class StateprovinceRepoMock(toRow: Function1[StateprovinceRowUnsaved, Stateprovi UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StateprovinceRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.stateprovinceid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala index 312892a6e3..e94ddcbb50 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepo.scala @@ -32,4 +32,7 @@ trait BillofmaterialsRepo { def update: UpdateBuilder[BillofmaterialsFields, BillofmaterialsRow] def update(row: BillofmaterialsRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: BillofmaterialsRow): ZIO[ZConnection, Throwable, UpdateResult[BillofmaterialsRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BillofmaterialsRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala index fd1c7f2c09..cb38914d88 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoImpl.scala @@ -144,4 +144,24 @@ class BillofmaterialsRepoImpl extends BillofmaterialsRepo { "modifieddate" = EXCLUDED."modifieddate" returning "billofmaterialsid", "productassemblyid", "componentid", "startdate"::text, "enddate"::text, "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate"::text""".insertReturning(using BillofmaterialsRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BillofmaterialsRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table billofmaterials_TEMP (like production.billofmaterials) on commit drop".execute + val copied = streamingInsert(s"""copy billofmaterials_TEMP("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") from stdin""", batchSize, unsaved)(BillofmaterialsRow.text) + val merged = sql"""insert into production.billofmaterials("billofmaterialsid", "productassemblyid", "componentid", "startdate", "enddate", "unitmeasurecode", "bomlevel", "perassemblyqty", "modifieddate") + select * from billofmaterials_TEMP + on conflict ("billofmaterialsid") + do update set + "productassemblyid" = EXCLUDED."productassemblyid", + "componentid" = EXCLUDED."componentid", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "bomlevel" = EXCLUDED."bomlevel", + "perassemblyqty" = EXCLUDED."perassemblyqty", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table billofmaterials_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala index bd10bc5be2..46e570e631 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/billofmaterials/BillofmaterialsRepoMock.scala @@ -104,4 +104,13 @@ class BillofmaterialsRepoMock(toRow: Function1[BillofmaterialsRowUnsaved, Billof UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, BillofmaterialsRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.billofmaterialsid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala index 03712c55c1..40589de9a3 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepo.scala @@ -32,4 +32,7 @@ trait CultureRepo { def update: UpdateBuilder[CultureFields, CultureRow] def update(row: CultureRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CultureRow): ZIO[ZConnection, Throwable, UpdateResult[CultureRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala index 76057147d3..a939f40dde 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoImpl.scala @@ -108,4 +108,18 @@ class CultureRepoImpl extends CultureRepo { "modifieddate" = EXCLUDED."modifieddate" returning "cultureid", "name", "modifieddate"::text""".insertReturning(using CultureRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table culture_TEMP (like production.culture) on commit drop".execute + val copied = streamingInsert(s"""copy culture_TEMP("cultureid", "name", "modifieddate") from stdin""", batchSize, unsaved)(CultureRow.text) + val merged = sql"""insert into production.culture("cultureid", "name", "modifieddate") + select * from culture_TEMP + on conflict ("cultureid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table culture_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala index cdeaa55e05..1e92821f0d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/culture/CultureRepoMock.scala @@ -104,4 +104,13 @@ class CultureRepoMock(toRow: Function1[CultureRowUnsaved, CultureRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.cultureid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala index f99a29423a..724696083f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepo.scala @@ -34,4 +34,7 @@ trait DocumentRepo { def update: UpdateBuilder[DocumentFields, DocumentRow] def update(row: DocumentRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: DocumentRow): ZIO[ZConnection, Throwable, UpdateResult[DocumentRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala index 639c1bf316..38223ddad6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoImpl.scala @@ -171,4 +171,28 @@ class DocumentRepoImpl extends DocumentRepo { "modifieddate" = EXCLUDED."modifieddate" returning "title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate"::text, "documentnode"""".insertReturning(using DocumentRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table document_TEMP (like production.document) on commit drop".execute + val copied = streamingInsert(s"""copy document_TEMP("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") from stdin""", batchSize, unsaved)(DocumentRow.text) + val merged = sql"""insert into production.document("title", "owner", "folderflag", "filename", "fileextension", "revision", "changenumber", "status", "documentsummary", "document", "rowguid", "modifieddate", "documentnode") + select * from document_TEMP + on conflict ("documentnode") + do update set + "title" = EXCLUDED."title", + "owner" = EXCLUDED."owner", + "folderflag" = EXCLUDED."folderflag", + "filename" = EXCLUDED."filename", + "fileextension" = EXCLUDED."fileextension", + "revision" = EXCLUDED."revision", + "changenumber" = EXCLUDED."changenumber", + "status" = EXCLUDED."status", + "documentsummary" = EXCLUDED."documentsummary", + "document" = EXCLUDED."document", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table document_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala index d8dbf79792..2443ab6c70 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/document/DocumentRepoMock.scala @@ -108,4 +108,13 @@ class DocumentRepoMock(toRow: Function1[DocumentRowUnsaved, DocumentRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, DocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.documentnode -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala index 1ff081c4c2..6a97359380 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepo.scala @@ -32,4 +32,7 @@ trait IllustrationRepo { def update: UpdateBuilder[IllustrationFields, IllustrationRow] def update(row: IllustrationRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: IllustrationRow): ZIO[ZConnection, Throwable, UpdateResult[IllustrationRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IllustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala index 73c8dc570a..9b4e4700bd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoImpl.scala @@ -112,4 +112,18 @@ class IllustrationRepoImpl extends IllustrationRepo { "modifieddate" = EXCLUDED."modifieddate" returning "illustrationid", "diagram", "modifieddate"::text""".insertReturning(using IllustrationRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IllustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table illustration_TEMP (like production.illustration) on commit drop".execute + val copied = streamingInsert(s"""copy illustration_TEMP("illustrationid", "diagram", "modifieddate") from stdin""", batchSize, unsaved)(IllustrationRow.text) + val merged = sql"""insert into production.illustration("illustrationid", "diagram", "modifieddate") + select * from illustration_TEMP + on conflict ("illustrationid") + do update set + "diagram" = EXCLUDED."diagram", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table illustration_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala index 466b5da4c0..e3499486ab 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/illustration/IllustrationRepoMock.scala @@ -104,4 +104,13 @@ class IllustrationRepoMock(toRow: Function1[IllustrationRowUnsaved, Illustration UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IllustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.illustrationid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala index 2ba616034a..161d008b6b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepo.scala @@ -32,4 +32,7 @@ trait LocationRepo { def update: UpdateBuilder[LocationFields, LocationRow] def update(row: LocationRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: LocationRow): ZIO[ZConnection, Throwable, UpdateResult[LocationRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, LocationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala index 628c9d6baf..0f175d8922 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoImpl.scala @@ -126,4 +126,20 @@ class LocationRepoImpl extends LocationRepo { "modifieddate" = EXCLUDED."modifieddate" returning "locationid", "name", "costrate", "availability", "modifieddate"::text""".insertReturning(using LocationRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, LocationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table location_TEMP (like production.location) on commit drop".execute + val copied = streamingInsert(s"""copy location_TEMP("locationid", "name", "costrate", "availability", "modifieddate") from stdin""", batchSize, unsaved)(LocationRow.text) + val merged = sql"""insert into production.location("locationid", "name", "costrate", "availability", "modifieddate") + select * from location_TEMP + on conflict ("locationid") + do update set + "name" = EXCLUDED."name", + "costrate" = EXCLUDED."costrate", + "availability" = EXCLUDED."availability", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table location_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala index 22c1f0e7f4..4f54b1a9bb 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/location/LocationRepoMock.scala @@ -104,4 +104,13 @@ class LocationRepoMock(toRow: Function1[LocationRowUnsaved, LocationRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, LocationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.locationid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala index 0eca703668..4be415fe6a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepo.scala @@ -32,4 +32,7 @@ trait ProductRepo { def update: UpdateBuilder[ProductFields, ProductRow] def update(row: ProductRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductRow): ZIO[ZConnection, Throwable, UpdateResult[ProductRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala index 72b2f4373a..cbec386008 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoImpl.scala @@ -215,4 +215,40 @@ class ProductRepoImpl extends ProductRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate"::text, "sellenddate"::text, "discontinueddate"::text, "rowguid", "modifieddate"::text""".insertReturning(using ProductRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table product_TEMP (like production.product) on commit drop".execute + val copied = streamingInsert(s"""copy product_TEMP("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductRow.text) + val merged = sql"""insert into production.product("productid", "name", "productnumber", "makeflag", "finishedgoodsflag", "color", "safetystocklevel", "reorderpoint", "standardcost", "listprice", "size", "sizeunitmeasurecode", "weightunitmeasurecode", "weight", "daystomanufacture", "productline", "class", "style", "productsubcategoryid", "productmodelid", "sellstartdate", "sellenddate", "discontinueddate", "rowguid", "modifieddate") + select * from product_TEMP + on conflict ("productid") + do update set + "name" = EXCLUDED."name", + "productnumber" = EXCLUDED."productnumber", + "makeflag" = EXCLUDED."makeflag", + "finishedgoodsflag" = EXCLUDED."finishedgoodsflag", + "color" = EXCLUDED."color", + "safetystocklevel" = EXCLUDED."safetystocklevel", + "reorderpoint" = EXCLUDED."reorderpoint", + "standardcost" = EXCLUDED."standardcost", + "listprice" = EXCLUDED."listprice", + "size" = EXCLUDED."size", + "sizeunitmeasurecode" = EXCLUDED."sizeunitmeasurecode", + "weightunitmeasurecode" = EXCLUDED."weightunitmeasurecode", + "weight" = EXCLUDED."weight", + "daystomanufacture" = EXCLUDED."daystomanufacture", + "productline" = EXCLUDED."productline", + "class" = EXCLUDED."class", + "style" = EXCLUDED."style", + "productsubcategoryid" = EXCLUDED."productsubcategoryid", + "productmodelid" = EXCLUDED."productmodelid", + "sellstartdate" = EXCLUDED."sellstartdate", + "sellenddate" = EXCLUDED."sellenddate", + "discontinueddate" = EXCLUDED."discontinueddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table product_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala index 20bfccfedf..b3b1a494ba 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/product/ProductRepoMock.scala @@ -104,4 +104,13 @@ class ProductRepoMock(toRow: Function1[ProductRowUnsaved, ProductRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala index 0d84445fa4..ba80243ae1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepo.scala @@ -32,4 +32,7 @@ trait ProductcategoryRepo { def update: UpdateBuilder[ProductcategoryFields, ProductcategoryRow] def update(row: ProductcategoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductcategoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductcategoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala index 3b9e1baa61..a6cc30056b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoImpl.scala @@ -119,4 +119,19 @@ class ProductcategoryRepoImpl extends ProductcategoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productcategoryid", "name", "rowguid", "modifieddate"::text""".insertReturning(using ProductcategoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productcategory_TEMP (like production.productcategory) on commit drop".execute + val copied = streamingInsert(s"""copy productcategory_TEMP("productcategoryid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductcategoryRow.text) + val merged = sql"""insert into production.productcategory("productcategoryid", "name", "rowguid", "modifieddate") + select * from productcategory_TEMP + on conflict ("productcategoryid") + do update set + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcategory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala index 96c228625d..351aef21da 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcategory/ProductcategoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductcategoryRepoMock(toRow: Function1[ProductcategoryRowUnsaved, Produc UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productcategoryid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala index 0bf6ecf398..0893036d56 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepo.scala @@ -32,4 +32,7 @@ trait ProductcosthistoryRepo { def update: UpdateBuilder[ProductcosthistoryFields, ProductcosthistoryRow] def update(row: ProductcosthistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductcosthistoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductcosthistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcosthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala index ce95305ed7..da6654b105 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoImpl.scala @@ -129,4 +129,19 @@ class ProductcosthistoryRepoImpl extends ProductcosthistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "startdate"::text, "enddate"::text, "standardcost", "modifieddate"::text""".insertReturning(using ProductcosthistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcosthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productcosthistory_TEMP (like production.productcosthistory) on commit drop".execute + val copied = streamingInsert(s"""copy productcosthistory_TEMP("productid", "startdate", "enddate", "standardcost", "modifieddate") from stdin""", batchSize, unsaved)(ProductcosthistoryRow.text) + val merged = sql"""insert into production.productcosthistory("productid", "startdate", "enddate", "standardcost", "modifieddate") + select * from productcosthistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "standardcost" = EXCLUDED."standardcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productcosthistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala index 65553c54ec..94f8bbb7c6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productcosthistory/ProductcosthistoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductcosthistoryRepoMock(toRow: Function1[ProductcosthistoryRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductcosthistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala index cfe85d2fe8..c24dd31911 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepo.scala @@ -32,4 +32,7 @@ trait ProductdescriptionRepo { def update: UpdateBuilder[ProductdescriptionFields, ProductdescriptionRow] def update(row: ProductdescriptionRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductdescriptionRow): ZIO[ZConnection, Throwable, UpdateResult[ProductdescriptionRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdescriptionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala index 56b7f8af4e..dd2c203187 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoImpl.scala @@ -119,4 +119,19 @@ class ProductdescriptionRepoImpl extends ProductdescriptionRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productdescriptionid", "description", "rowguid", "modifieddate"::text""".insertReturning(using ProductdescriptionRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdescriptionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productdescription_TEMP (like production.productdescription) on commit drop".execute + val copied = streamingInsert(s"""copy productdescription_TEMP("productdescriptionid", "description", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductdescriptionRow.text) + val merged = sql"""insert into production.productdescription("productdescriptionid", "description", "rowguid", "modifieddate") + select * from productdescription_TEMP + on conflict ("productdescriptionid") + do update set + "description" = EXCLUDED."description", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdescription_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala index 27856e11a6..fc61ae7820 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdescription/ProductdescriptionRepoMock.scala @@ -104,4 +104,13 @@ class ProductdescriptionRepoMock(toRow: Function1[ProductdescriptionRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdescriptionRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productdescriptionid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala index a8d933b97f..4639989de5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepo.scala @@ -32,4 +32,7 @@ trait ProductdocumentRepo { def update: UpdateBuilder[ProductdocumentFields, ProductdocumentRow] def update(row: ProductdocumentRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductdocumentRow): ZIO[ZConnection, Throwable, UpdateResult[ProductdocumentRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala index d74c080212..afd5b277f2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoImpl.scala @@ -124,4 +124,17 @@ class ProductdocumentRepoImpl extends ProductdocumentRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "modifieddate"::text, "documentnode"""".insertReturning(using ProductdocumentRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productdocument_TEMP (like production.productdocument) on commit drop".execute + val copied = streamingInsert(s"""copy productdocument_TEMP("productid", "modifieddate", "documentnode") from stdin""", batchSize, unsaved)(ProductdocumentRow.text) + val merged = sql"""insert into production.productdocument("productid", "modifieddate", "documentnode") + select * from productdocument_TEMP + on conflict ("productid", "documentnode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productdocument_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala index 167ca48baa..a3ec80fa49 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productdocument/ProductdocumentRepoMock.scala @@ -104,4 +104,13 @@ class ProductdocumentRepoMock(toRow: Function1[ProductdocumentRowUnsaved, Produc UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductdocumentRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala index 52e9c1d122..6dc98292db 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepo.scala @@ -32,4 +32,7 @@ trait ProductinventoryRepo { def update: UpdateBuilder[ProductinventoryFields, ProductinventoryRow] def update(row: ProductinventoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductinventoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductinventoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductinventoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala index 682c8d6a10..a1495d1d26 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoImpl.scala @@ -146,4 +146,21 @@ class ProductinventoryRepoImpl extends ProductinventoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate"::text""".insertReturning(using ProductinventoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductinventoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productinventory_TEMP (like production.productinventory) on commit drop".execute + val copied = streamingInsert(s"""copy productinventory_TEMP("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductinventoryRow.text) + val merged = sql"""insert into production.productinventory("productid", "locationid", "shelf", "bin", "quantity", "rowguid", "modifieddate") + select * from productinventory_TEMP + on conflict ("productid", "locationid") + do update set + "shelf" = EXCLUDED."shelf", + "bin" = EXCLUDED."bin", + "quantity" = EXCLUDED."quantity", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productinventory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala index 0bb6ad34b7..837bcfaccc 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productinventory/ProductinventoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductinventoryRepoMock(toRow: Function1[ProductinventoryRowUnsaved, Prod UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductinventoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala index 401a222fbc..e2a65f0359 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepo.scala @@ -32,4 +32,7 @@ trait ProductlistpricehistoryRepo { def update: UpdateBuilder[ProductlistpricehistoryFields, ProductlistpricehistoryRow] def update(row: ProductlistpricehistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductlistpricehistoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductlistpricehistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductlistpricehistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala index 789102f7aa..a5ff94d934 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoImpl.scala @@ -129,4 +129,19 @@ class ProductlistpricehistoryRepoImpl extends ProductlistpricehistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "startdate"::text, "enddate"::text, "listprice", "modifieddate"::text""".insertReturning(using ProductlistpricehistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductlistpricehistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productlistpricehistory_TEMP (like production.productlistpricehistory) on commit drop".execute + val copied = streamingInsert(s"""copy productlistpricehistory_TEMP("productid", "startdate", "enddate", "listprice", "modifieddate") from stdin""", batchSize, unsaved)(ProductlistpricehistoryRow.text) + val merged = sql"""insert into production.productlistpricehistory("productid", "startdate", "enddate", "listprice", "modifieddate") + select * from productlistpricehistory_TEMP + on conflict ("productid", "startdate") + do update set + "enddate" = EXCLUDED."enddate", + "listprice" = EXCLUDED."listprice", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productlistpricehistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala index 23082bf8b4..1af92512b2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productlistpricehistory/ProductlistpricehistoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductlistpricehistoryRepoMock(toRow: Function1[ProductlistpricehistoryRo UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductlistpricehistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala index f83f9f199a..7f7535151b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepo.scala @@ -32,4 +32,7 @@ trait ProductmodelRepo { def update: UpdateBuilder[ProductmodelFields, ProductmodelRow] def update(row: ProductmodelRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductmodelRow): ZIO[ZConnection, Throwable, UpdateResult[ProductmodelRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala index bfb8c9bb40..fe308ea5ad 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoImpl.scala @@ -129,4 +129,21 @@ class ProductmodelRepoImpl extends ProductmodelRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate"::text""".insertReturning(using ProductmodelRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productmodel_TEMP (like production.productmodel) on commit drop".execute + val copied = streamingInsert(s"""copy productmodel_TEMP("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelRow.text) + val merged = sql"""insert into production.productmodel("productmodelid", "name", "catalogdescription", "instructions", "rowguid", "modifieddate") + select * from productmodel_TEMP + on conflict ("productmodelid") + do update set + "name" = EXCLUDED."name", + "catalogdescription" = EXCLUDED."catalogdescription", + "instructions" = EXCLUDED."instructions", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodel_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala index fe929b5210..4e44dac673 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodel/ProductmodelRepoMock.scala @@ -104,4 +104,13 @@ class ProductmodelRepoMock(toRow: Function1[ProductmodelRowUnsaved, Productmodel UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productmodelid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala index d02bfe0c07..957c7eb6b5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepo.scala @@ -32,4 +32,7 @@ trait ProductmodelillustrationRepo { def update: UpdateBuilder[ProductmodelillustrationFields, ProductmodelillustrationRow] def update(row: ProductmodelillustrationRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductmodelillustrationRow): ZIO[ZConnection, Throwable, UpdateResult[ProductmodelillustrationRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelillustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala index 8ef3057fd0..f5d4efd0a7 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoImpl.scala @@ -121,4 +121,17 @@ class ProductmodelillustrationRepoImpl extends ProductmodelillustrationRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productmodelid", "illustrationid", "modifieddate"::text""".insertReturning(using ProductmodelillustrationRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelillustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productmodelillustration_TEMP (like production.productmodelillustration) on commit drop".execute + val copied = streamingInsert(s"""copy productmodelillustration_TEMP("productmodelid", "illustrationid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelillustrationRow.text) + val merged = sql"""insert into production.productmodelillustration("productmodelid", "illustrationid", "modifieddate") + select * from productmodelillustration_TEMP + on conflict ("productmodelid", "illustrationid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelillustration_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala index fa0976c0a9..d092852f29 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelillustration/ProductmodelillustrationRepoMock.scala @@ -104,4 +104,13 @@ class ProductmodelillustrationRepoMock(toRow: Function1[Productmodelillustration UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelillustrationRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala index 9e7af612db..0faf5dfcb2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepo.scala @@ -32,4 +32,7 @@ trait ProductmodelproductdescriptioncultureRepo { def update: UpdateBuilder[ProductmodelproductdescriptioncultureFields, ProductmodelproductdescriptioncultureRow] def update(row: ProductmodelproductdescriptioncultureRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductmodelproductdescriptioncultureRow): ZIO[ZConnection, Throwable, UpdateResult[ProductmodelproductdescriptioncultureRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala index 46d1c7b410..90d8ecdb1d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoImpl.scala @@ -126,4 +126,17 @@ class ProductmodelproductdescriptioncultureRepoImpl extends Productmodelproductd "modifieddate" = EXCLUDED."modifieddate" returning "productmodelid", "productdescriptionid", "cultureid", "modifieddate"::text""".insertReturning(using ProductmodelproductdescriptioncultureRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productmodelproductdescriptionculture_TEMP (like production.productmodelproductdescriptionculture) on commit drop".execute + val copied = streamingInsert(s"""copy productmodelproductdescriptionculture_TEMP("productmodelid", "productdescriptionid", "cultureid", "modifieddate") from stdin""", batchSize, unsaved)(ProductmodelproductdescriptioncultureRow.text) + val merged = sql"""insert into production.productmodelproductdescriptionculture("productmodelid", "productdescriptionid", "cultureid", "modifieddate") + select * from productmodelproductdescriptionculture_TEMP + on conflict ("productmodelid", "productdescriptionid", "cultureid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productmodelproductdescriptionculture_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala index 19545e0df1..326d44cf18 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productmodelproductdescriptionculture/ProductmodelproductdescriptioncultureRepoMock.scala @@ -104,4 +104,13 @@ class ProductmodelproductdescriptioncultureRepoMock(toRow: Function1[Productmode UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductmodelproductdescriptioncultureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala index 493c164ac9..99411251f8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepo.scala @@ -32,4 +32,7 @@ trait ProductphotoRepo { def update: UpdateBuilder[ProductphotoFields, ProductphotoRow] def update(row: ProductphotoRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductphotoRow): ZIO[ZConnection, Throwable, UpdateResult[ProductphotoRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala index aaababbdb0..1e275225c7 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoImpl.scala @@ -124,4 +124,21 @@ class ProductphotoRepoImpl extends ProductphotoRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate"::text""".insertReturning(using ProductphotoRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productphoto_TEMP (like production.productphoto) on commit drop".execute + val copied = streamingInsert(s"""copy productphoto_TEMP("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") from stdin""", batchSize, unsaved)(ProductphotoRow.text) + val merged = sql"""insert into production.productphoto("productphotoid", "thumbnailphoto", "thumbnailphotofilename", "largephoto", "largephotofilename", "modifieddate") + select * from productphoto_TEMP + on conflict ("productphotoid") + do update set + "thumbnailphoto" = EXCLUDED."thumbnailphoto", + "thumbnailphotofilename" = EXCLUDED."thumbnailphotofilename", + "largephoto" = EXCLUDED."largephoto", + "largephotofilename" = EXCLUDED."largephotofilename", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productphoto_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala index cbcbffd667..d7bd187302 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productphoto/ProductphotoRepoMock.scala @@ -104,4 +104,13 @@ class ProductphotoRepoMock(toRow: Function1[ProductphotoRowUnsaved, Productphoto UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productphotoid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala index eb8617e15f..80f5e8a483 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepo.scala @@ -32,4 +32,7 @@ trait ProductproductphotoRepo { def update: UpdateBuilder[ProductproductphotoFields, ProductproductphotoRow] def update(row: ProductproductphotoRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductproductphotoRow): ZIO[ZConnection, Throwable, UpdateResult[ProductproductphotoRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductproductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala index 9094ad004d..42a3dc62d2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoImpl.scala @@ -129,4 +129,18 @@ class ProductproductphotoRepoImpl extends ProductproductphotoRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "productphotoid", "primary", "modifieddate"::text""".insertReturning(using ProductproductphotoRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductproductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productproductphoto_TEMP (like production.productproductphoto) on commit drop".execute + val copied = streamingInsert(s"""copy productproductphoto_TEMP("productid", "productphotoid", "primary", "modifieddate") from stdin""", batchSize, unsaved)(ProductproductphotoRow.text) + val merged = sql"""insert into production.productproductphoto("productid", "productphotoid", "primary", "modifieddate") + select * from productproductphoto_TEMP + on conflict ("productid", "productphotoid") + do update set + "primary" = EXCLUDED."primary", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productproductphoto_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala index 984227a4c8..9afe2290d4 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productproductphoto/ProductproductphotoRepoMock.scala @@ -104,4 +104,13 @@ class ProductproductphotoRepoMock(toRow: Function1[ProductproductphotoRowUnsaved UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductproductphotoRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala index d6f45cb205..5d9570ad55 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepo.scala @@ -32,4 +32,7 @@ trait ProductreviewRepo { def update: UpdateBuilder[ProductreviewFields, ProductreviewRow] def update(row: ProductreviewRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductreviewRow): ZIO[ZConnection, Throwable, UpdateResult[ProductreviewRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductreviewRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala index d2ae45bed7..b49714ffa5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoImpl.scala @@ -136,4 +136,23 @@ class ProductreviewRepoImpl extends ProductreviewRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productreviewid", "productid", "reviewername", "reviewdate"::text, "emailaddress", "rating", "comments", "modifieddate"::text""".insertReturning(using ProductreviewRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductreviewRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productreview_TEMP (like production.productreview) on commit drop".execute + val copied = streamingInsert(s"""copy productreview_TEMP("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") from stdin""", batchSize, unsaved)(ProductreviewRow.text) + val merged = sql"""insert into production.productreview("productreviewid", "productid", "reviewername", "reviewdate", "emailaddress", "rating", "comments", "modifieddate") + select * from productreview_TEMP + on conflict ("productreviewid") + do update set + "productid" = EXCLUDED."productid", + "reviewername" = EXCLUDED."reviewername", + "reviewdate" = EXCLUDED."reviewdate", + "emailaddress" = EXCLUDED."emailaddress", + "rating" = EXCLUDED."rating", + "comments" = EXCLUDED."comments", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productreview_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala index 791c10e64f..d39abe0e98 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productreview/ProductreviewRepoMock.scala @@ -104,4 +104,13 @@ class ProductreviewRepoMock(toRow: Function1[ProductreviewRowUnsaved, Productrev UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductreviewRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productreviewid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala index b0d3f39526..2c8f26b321 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepo.scala @@ -32,4 +32,7 @@ trait ProductsubcategoryRepo { def update: UpdateBuilder[ProductsubcategoryFields, ProductsubcategoryRow] def update(row: ProductsubcategoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductsubcategoryRow): ZIO[ZConnection, Throwable, UpdateResult[ProductsubcategoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductsubcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala index 0cf7c59751..20da56b53e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoImpl.scala @@ -124,4 +124,20 @@ class ProductsubcategoryRepoImpl extends ProductsubcategoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate"::text""".insertReturning(using ProductsubcategoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductsubcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productsubcategory_TEMP (like production.productsubcategory) on commit drop".execute + val copied = streamingInsert(s"""copy productsubcategory_TEMP("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ProductsubcategoryRow.text) + val merged = sql"""insert into production.productsubcategory("productsubcategoryid", "productcategoryid", "name", "rowguid", "modifieddate") + select * from productsubcategory_TEMP + on conflict ("productsubcategoryid") + do update set + "productcategoryid" = EXCLUDED."productcategoryid", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productsubcategory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala index e34b3f74be..0fcf2060cd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/productsubcategory/ProductsubcategoryRepoMock.scala @@ -104,4 +104,13 @@ class ProductsubcategoryRepoMock(toRow: Function1[ProductsubcategoryRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductsubcategoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.productsubcategoryid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala index 591e1f6f01..f9762d2b03 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepo.scala @@ -32,4 +32,7 @@ trait ScrapreasonRepo { def update: UpdateBuilder[ScrapreasonFields, ScrapreasonRow] def update(row: ScrapreasonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ScrapreasonRow): ZIO[ZConnection, Throwable, UpdateResult[ScrapreasonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ScrapreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala index 13c70d4bb6..fd8f70a67d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoImpl.scala @@ -111,4 +111,18 @@ class ScrapreasonRepoImpl extends ScrapreasonRepo { "modifieddate" = EXCLUDED."modifieddate" returning "scrapreasonid", "name", "modifieddate"::text""".insertReturning(using ScrapreasonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ScrapreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table scrapreason_TEMP (like production.scrapreason) on commit drop".execute + val copied = streamingInsert(s"""copy scrapreason_TEMP("scrapreasonid", "name", "modifieddate") from stdin""", batchSize, unsaved)(ScrapreasonRow.text) + val merged = sql"""insert into production.scrapreason("scrapreasonid", "name", "modifieddate") + select * from scrapreason_TEMP + on conflict ("scrapreasonid") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table scrapreason_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala index bd35f68e10..b6aaf6a0e6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/scrapreason/ScrapreasonRepoMock.scala @@ -104,4 +104,13 @@ class ScrapreasonRepoMock(toRow: Function1[ScrapreasonRowUnsaved, ScrapreasonRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ScrapreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.scrapreasonid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala index 5fd64695fa..d2dddd5c9a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepo.scala @@ -32,4 +32,7 @@ trait TransactionhistoryRepo { def update: UpdateBuilder[TransactionhistoryFields, TransactionhistoryRow] def update(row: TransactionhistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: TransactionhistoryRow): ZIO[ZConnection, Throwable, UpdateResult[TransactionhistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala index fc3444f3db..cdb82a4978 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoImpl.scala @@ -142,4 +142,24 @@ class TransactionhistoryRepoImpl extends TransactionhistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text""".insertReturning(using TransactionhistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table transactionhistory_TEMP (like production.transactionhistory) on commit drop".execute + val copied = streamingInsert(s"""copy transactionhistory_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(TransactionhistoryRow.text) + val merged = sql"""insert into production.transactionhistory("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistory_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala index d2093da597..a1c76efc21 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistory/TransactionhistoryRepoMock.scala @@ -104,4 +104,13 @@ class TransactionhistoryRepoMock(toRow: Function1[TransactionhistoryRowUnsaved, UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.transactionid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala index f9a6d2ebce..d870345009 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepo.scala @@ -32,4 +32,7 @@ trait TransactionhistoryarchiveRepo { def update: UpdateBuilder[TransactionhistoryarchiveFields, TransactionhistoryarchiveRow] def update(row: TransactionhistoryarchiveRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: TransactionhistoryarchiveRow): ZIO[ZConnection, Throwable, UpdateResult[TransactionhistoryarchiveRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryarchiveRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala index cef5180d39..b84c631613 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoImpl.scala @@ -138,4 +138,24 @@ class TransactionhistoryarchiveRepoImpl extends TransactionhistoryarchiveRepo { "modifieddate" = EXCLUDED."modifieddate" returning "transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate"::text, "transactiontype", "quantity", "actualcost", "modifieddate"::text""".insertReturning(using TransactionhistoryarchiveRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryarchiveRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table transactionhistoryarchive_TEMP (like production.transactionhistoryarchive) on commit drop".execute + val copied = streamingInsert(s"""copy transactionhistoryarchive_TEMP("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(TransactionhistoryarchiveRow.text) + val merged = sql"""insert into production.transactionhistoryarchive("transactionid", "productid", "referenceorderid", "referenceorderlineid", "transactiondate", "transactiontype", "quantity", "actualcost", "modifieddate") + select * from transactionhistoryarchive_TEMP + on conflict ("transactionid") + do update set + "productid" = EXCLUDED."productid", + "referenceorderid" = EXCLUDED."referenceorderid", + "referenceorderlineid" = EXCLUDED."referenceorderlineid", + "transactiondate" = EXCLUDED."transactiondate", + "transactiontype" = EXCLUDED."transactiontype", + "quantity" = EXCLUDED."quantity", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table transactionhistoryarchive_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala index d7dbcae654..fd34236e63 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/transactionhistoryarchive/TransactionhistoryarchiveRepoMock.scala @@ -104,4 +104,13 @@ class TransactionhistoryarchiveRepoMock(toRow: Function1[Transactionhistoryarchi UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, TransactionhistoryarchiveRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.transactionid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala index 7de3b87451..447430e09a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepo.scala @@ -32,4 +32,7 @@ trait UnitmeasureRepo { def update: UpdateBuilder[UnitmeasureFields, UnitmeasureRow] def update(row: UnitmeasureRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: UnitmeasureRow): ZIO[ZConnection, Throwable, UpdateResult[UnitmeasureRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UnitmeasureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala index 885cb06da2..5698a7ff74 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoImpl.scala @@ -108,4 +108,18 @@ class UnitmeasureRepoImpl extends UnitmeasureRepo { "modifieddate" = EXCLUDED."modifieddate" returning "unitmeasurecode", "name", "modifieddate"::text""".insertReturning(using UnitmeasureRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UnitmeasureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table unitmeasure_TEMP (like production.unitmeasure) on commit drop".execute + val copied = streamingInsert(s"""copy unitmeasure_TEMP("unitmeasurecode", "name", "modifieddate") from stdin""", batchSize, unsaved)(UnitmeasureRow.text) + val merged = sql"""insert into production.unitmeasure("unitmeasurecode", "name", "modifieddate") + select * from unitmeasure_TEMP + on conflict ("unitmeasurecode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table unitmeasure_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala index ff47e0dccc..1cafad1f6e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/unitmeasure/UnitmeasureRepoMock.scala @@ -104,4 +104,13 @@ class UnitmeasureRepoMock(toRow: Function1[UnitmeasureRowUnsaved, UnitmeasureRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UnitmeasureRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.unitmeasurecode -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala index f58124a293..76e4ac767f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepo.scala @@ -32,4 +32,7 @@ trait WorkorderRepo { def update: UpdateBuilder[WorkorderFields, WorkorderRow] def update(row: WorkorderRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: WorkorderRow): ZIO[ZConnection, Throwable, UpdateResult[WorkorderRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala index ae17d35041..b3ec67bd18 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoImpl.scala @@ -138,4 +138,24 @@ class WorkorderRepoImpl extends WorkorderRepo { "modifieddate" = EXCLUDED."modifieddate" returning "workorderid", "productid", "orderqty", "scrappedqty", "startdate"::text, "enddate"::text, "duedate"::text, "scrapreasonid", "modifieddate"::text""".insertReturning(using WorkorderRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table workorder_TEMP (like production.workorder) on commit drop".execute + val copied = streamingInsert(s"""copy workorder_TEMP("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") from stdin""", batchSize, unsaved)(WorkorderRow.text) + val merged = sql"""insert into production.workorder("workorderid", "productid", "orderqty", "scrappedqty", "startdate", "enddate", "duedate", "scrapreasonid", "modifieddate") + select * from workorder_TEMP + on conflict ("workorderid") + do update set + "productid" = EXCLUDED."productid", + "orderqty" = EXCLUDED."orderqty", + "scrappedqty" = EXCLUDED."scrappedqty", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "duedate" = EXCLUDED."duedate", + "scrapreasonid" = EXCLUDED."scrapreasonid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorder_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala index 5f6fb7a10a..1d0ef61f03 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorder/WorkorderRepoMock.scala @@ -104,4 +104,13 @@ class WorkorderRepoMock(toRow: Function1[WorkorderRowUnsaved, WorkorderRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.workorderid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala index 69e4845451..738cf38d97 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepo.scala @@ -32,4 +32,7 @@ trait WorkorderroutingRepo { def update: UpdateBuilder[WorkorderroutingFields, WorkorderroutingRow] def update(row: WorkorderroutingRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: WorkorderroutingRow): ZIO[ZConnection, Throwable, UpdateResult[WorkorderroutingRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderroutingRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala index a40f78010d..664c45955e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoImpl.scala @@ -159,4 +159,25 @@ class WorkorderroutingRepoImpl extends WorkorderroutingRepo { "modifieddate" = EXCLUDED."modifieddate" returning "workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate"::text, "scheduledenddate"::text, "actualstartdate"::text, "actualenddate"::text, "actualresourcehrs", "plannedcost", "actualcost", "modifieddate"::text""".insertReturning(using WorkorderroutingRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderroutingRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table workorderrouting_TEMP (like production.workorderrouting) on commit drop".execute + val copied = streamingInsert(s"""copy workorderrouting_TEMP("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") from stdin""", batchSize, unsaved)(WorkorderroutingRow.text) + val merged = sql"""insert into production.workorderrouting("workorderid", "productid", "operationsequence", "locationid", "scheduledstartdate", "scheduledenddate", "actualstartdate", "actualenddate", "actualresourcehrs", "plannedcost", "actualcost", "modifieddate") + select * from workorderrouting_TEMP + on conflict ("workorderid", "productid", "operationsequence") + do update set + "locationid" = EXCLUDED."locationid", + "scheduledstartdate" = EXCLUDED."scheduledstartdate", + "scheduledenddate" = EXCLUDED."scheduledenddate", + "actualstartdate" = EXCLUDED."actualstartdate", + "actualenddate" = EXCLUDED."actualenddate", + "actualresourcehrs" = EXCLUDED."actualresourcehrs", + "plannedcost" = EXCLUDED."plannedcost", + "actualcost" = EXCLUDED."actualcost", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table workorderrouting_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala index a3d43858cc..5c7f3cc121 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/production/workorderrouting/WorkorderroutingRepoMock.scala @@ -104,4 +104,13 @@ class WorkorderroutingRepoMock(toRow: Function1[WorkorderroutingRowUnsaved, Work UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, WorkorderroutingRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala index f1237e8762..7163410297 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepo.scala @@ -29,4 +29,7 @@ trait FlaffRepo { def update: UpdateBuilder[FlaffFields, FlaffRow] def update(row: FlaffRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: FlaffRow): ZIO[ZConnection, Throwable, UpdateResult[FlaffRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FlaffRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala index 9d7cf7a6a7..9a3e5ad45b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoImpl.scala @@ -97,4 +97,17 @@ class FlaffRepoImpl extends FlaffRepo { "parentspecifier" = EXCLUDED."parentspecifier" returning "code", "another_code", "some_number", "specifier", "parentspecifier"""".insertReturning(using FlaffRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FlaffRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table flaff_TEMP (like public.flaff) on commit drop".execute + val copied = streamingInsert(s"""copy flaff_TEMP("code", "another_code", "some_number", "specifier", "parentspecifier") from stdin""", batchSize, unsaved)(FlaffRow.text) + val merged = sql"""insert into public.flaff("code", "another_code", "some_number", "specifier", "parentspecifier") + select * from flaff_TEMP + on conflict ("code", "another_code", "some_number", "specifier") + do update set + "parentspecifier" = EXCLUDED."parentspecifier" + ; + drop table flaff_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala index c498852dd3..4850617e18 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/flaff/FlaffRepoMock.scala @@ -90,4 +90,13 @@ class FlaffRepoMock(map: scala.collection.mutable.Map[FlaffId, FlaffRow] = scala UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, FlaffRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala index 94abafbe0e..e5850f8850 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepo.scala @@ -32,4 +32,7 @@ trait IdentityTestRepo { def update: UpdateBuilder[IdentityTestFields, IdentityTestRow] def update(row: IdentityTestRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: IdentityTestRow): ZIO[ZConnection, Throwable, UpdateResult[IdentityTestRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IdentityTestRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala index 1d70a20260..c67824ee64 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoImpl.scala @@ -106,4 +106,18 @@ class IdentityTestRepoImpl extends IdentityTestRepo { "default_generated" = EXCLUDED."default_generated" returning "always_generated", "default_generated", "name"""".insertReturning(using IdentityTestRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IdentityTestRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table identity-test_TEMP (like public.identity-test) on commit drop".execute + val copied = streamingInsert(s"""copy identity-test_TEMP("always_generated", "default_generated", "name") from stdin""", batchSize, unsaved)(IdentityTestRow.text) + val merged = sql"""insert into public.identity-test("always_generated", "default_generated", "name") + select * from identity-test_TEMP + on conflict ("name") + do update set + "always_generated" = EXCLUDED."always_generated", + "default_generated" = EXCLUDED."default_generated" + ; + drop table identity-test_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala index 885b7ceaf7..892fffec65 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/identity_test/IdentityTestRepoMock.scala @@ -104,4 +104,13 @@ class IdentityTestRepoMock(toRow: Function1[IdentityTestRowUnsaved, IdentityTest UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, IdentityTestRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.name -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala index ff5fc2d619..79bb9246bf 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepo.scala @@ -34,4 +34,7 @@ trait UsersRepo { def update: UpdateBuilder[UsersFields, UsersRow] def update(row: UsersRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: UsersRow): ZIO[ZConnection, Throwable, UpdateResult[UsersRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UsersRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala index ca99aff9a4..e3709fd553 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoImpl.scala @@ -131,4 +131,22 @@ class UsersRepoImpl extends UsersRepo { "verified_on" = EXCLUDED."verified_on" returning "user_id", "name", "last_name", "email"::text, "password", "created_at"::text, "verified_on"::text""".insertReturning(using UsersRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UsersRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table users_TEMP (like public.users) on commit drop".execute + val copied = streamingInsert(s"""copy users_TEMP("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") from stdin""", batchSize, unsaved)(UsersRow.text) + val merged = sql"""insert into public.users("user_id", "name", "last_name", "email", "password", "created_at", "verified_on") + select * from users_TEMP + on conflict ("user_id") + do update set + "name" = EXCLUDED."name", + "last_name" = EXCLUDED."last_name", + "email" = EXCLUDED."email", + "password" = EXCLUDED."password", + "created_at" = EXCLUDED."created_at", + "verified_on" = EXCLUDED."verified_on" + ; + drop table users_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala index 578a0d2762..fe9fddaac1 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/public/users/UsersRepoMock.scala @@ -108,4 +108,13 @@ class UsersRepoMock(toRow: Function1[UsersRowUnsaved, UsersRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, UsersRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.userId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala index 4b42e7a0f3..d3511502ea 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepo.scala @@ -32,4 +32,7 @@ trait ProductvendorRepo { def update: UpdateBuilder[ProductvendorFields, ProductvendorRow] def update(row: ProductvendorRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ProductvendorRow): ZIO[ZConnection, Throwable, UpdateResult[ProductvendorRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
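The merge statement always ends with an explicit `drop table ..._TEMP` even though the temp table is created `on commit drop`; presumably this is so a second `upsertStreaming` call on the same repo can run inside the same transaction without its `create temporary table` colliding with a leftover table. A sketch of that usage, with `usersRepo`, `firstBatch` and `secondBatch` assumed (not part of this patch):

import zio._
import zio.jdbc.ZConnection

val upsertTwice: ZIO[ZConnection, Throwable, Long] =
  for {
    n1 <- usersRepo.upsertStreaming(firstBatch)                    // default batchSize = 10000
    n2 <- usersRepo.upsertStreaming(secondBatch, batchSize = 1000)
  } yield n1 + n2
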
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductvendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala index 1963407225..ff46675a05 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoImpl.scala @@ -155,4 +155,25 @@ class ProductvendorRepoImpl extends ProductvendorRepo { "modifieddate" = EXCLUDED."modifieddate" returning "productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate"::text, "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate"::text""".insertReturning(using ProductvendorRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductvendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table productvendor_TEMP (like purchasing.productvendor) on commit drop".execute + val copied = streamingInsert(s"""copy productvendor_TEMP("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") from stdin""", batchSize, unsaved)(ProductvendorRow.text) + val merged = sql"""insert into purchasing.productvendor("productid", "businessentityid", "averageleadtime", "standardprice", "lastreceiptcost", "lastreceiptdate", "minorderqty", "maxorderqty", "onorderqty", "unitmeasurecode", "modifieddate") + select * from productvendor_TEMP + on conflict ("productid", "businessentityid") + do update set + "averageleadtime" = EXCLUDED."averageleadtime", + "standardprice" = EXCLUDED."standardprice", + "lastreceiptcost" = EXCLUDED."lastreceiptcost", + "lastreceiptdate" = EXCLUDED."lastreceiptdate", + "minorderqty" = EXCLUDED."minorderqty", + "maxorderqty" = EXCLUDED."maxorderqty", + "onorderqty" = EXCLUDED."onorderqty", + "unitmeasurecode" = EXCLUDED."unitmeasurecode", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table productvendor_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala index f5aa4d8503..ca967f9444 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/productvendor/ProductvendorRepoMock.scala @@ -104,4 +104,13 @@ class ProductvendorRepoMock(toRow: Function1[ProductvendorRowUnsaved, Productven UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
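For composite-key tables such as purchasing.productvendor the conflict target lists every key column ("productid", "businessentityid") and only the remaining columns are overwritten from EXCLUDED; the mocks mirror this by keying their map on `row.compositeId`. In effect a streaming upsert does the same work as calling the existing single-row `upsert` once per element, but replaces the per-row round trips with one COPY plus one merge. A rough comparison sketch (the repo value and `someRows: List[ProductvendorRow]` are assumptions, not from the patch):

import zio._
import zio.jdbc.ZConnection
import zio.stream.ZStream

// one statement per row, each returning the upserted row
val oneByOne: ZIO[ZConnection, Throwable, Long] =
  ZIO.foreach(someRows)(productvendorRepo.upsert).map(_.size.toLong)

// one COPY into the temp table plus one merge, regardless of row count
val bulk: ZIO[ZConnection, Throwable, Long] =
  productvendorRepo.upsertStreaming(ZStream.fromIterable(someRows))
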
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ProductvendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala index 3e7e3ea94d..8e4f2212ba 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepo.scala @@ -32,4 +32,7 @@ trait PurchaseorderheaderRepo { def update: UpdateBuilder[PurchaseorderheaderFields, PurchaseorderheaderRow] def update(row: PurchaseorderheaderRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PurchaseorderheaderRow): ZIO[ZConnection, Throwable, UpdateResult[PurchaseorderheaderRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PurchaseorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala index b2a9f8cf2f..60b8ca79f5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoImpl.scala @@ -168,4 +168,27 @@ class PurchaseorderheaderRepoImpl extends PurchaseorderheaderRepo { "modifieddate" = EXCLUDED."modifieddate" returning "purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate"::text, "shipdate"::text, "subtotal", "taxamt", "freight", "modifieddate"::text""".insertReturning(using PurchaseorderheaderRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PurchaseorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table purchaseorderheader_TEMP (like purchasing.purchaseorderheader) on commit drop".execute + val copied = streamingInsert(s"""copy purchaseorderheader_TEMP("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") from stdin""", batchSize, unsaved)(PurchaseorderheaderRow.text) + val merged = sql"""insert into purchasing.purchaseorderheader("purchaseorderid", "revisionnumber", "status", "employeeid", "vendorid", "shipmethodid", "orderdate", "shipdate", "subtotal", "taxamt", "freight", "modifieddate") + select * from purchaseorderheader_TEMP + on conflict ("purchaseorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "status" = EXCLUDED."status", + "employeeid" = EXCLUDED."employeeid", + "vendorid" = EXCLUDED."vendorid", + "shipmethodid" = EXCLUDED."shipmethodid", + "orderdate" = EXCLUDED."orderdate", + "shipdate" = EXCLUDED."shipdate", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = EXCLUDED."freight", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table purchaseorderheader_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala index 6e718679a8..bad95dac99 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/purchaseorderheader/PurchaseorderheaderRepoMock.scala @@ -104,4 +104,13 @@ class PurchaseorderheaderRepoMock(toRow: Function1[PurchaseorderheaderRowUnsaved UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PurchaseorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.purchaseorderid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala index 981ee3cb9f..411bd4d231 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepo.scala @@ -32,4 +32,7 @@ trait ShipmethodRepo { def update: UpdateBuilder[ShipmethodFields, ShipmethodRow] def update(row: ShipmethodRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ShipmethodRow): ZIO[ZConnection, Throwable, UpdateResult[ShipmethodRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShipmethodRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala index 1720360ccb..d62b096797 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoImpl.scala @@ -134,4 +134,21 @@ class ShipmethodRepoImpl extends ShipmethodRepo { "modifieddate" = EXCLUDED."modifieddate" returning "shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate"::text""".insertReturning(using ShipmethodRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShipmethodRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table shipmethod_TEMP (like purchasing.shipmethod) on commit drop".execute + val copied = streamingInsert(s"""copy shipmethod_TEMP("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(ShipmethodRow.text) + val merged = sql"""insert into purchasing.shipmethod("shipmethodid", "name", "shipbase", "shiprate", "rowguid", "modifieddate") + select * from shipmethod_TEMP + on conflict ("shipmethodid") + do update set + "name" = EXCLUDED."name", + "shipbase" = EXCLUDED."shipbase", + "shiprate" = EXCLUDED."shiprate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shipmethod_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala index cfcfc0d4bf..4e5493283f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/shipmethod/ShipmethodRepoMock.scala @@ -104,4 +104,13 @@ class ShipmethodRepoMock(toRow: Function1[ShipmethodRowUnsaved, ShipmethodRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShipmethodRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.shipmethodid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala index 8c492a5d32..86e851db01 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepo.scala @@ -33,4 +33,7 @@ trait VendorRepo { def update: UpdateBuilder[VendorFields, VendorRow] def update(row: VendorRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: VendorRow): ZIO[ZConnection, Throwable, UpdateResult[VendorRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, VendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala index 4950511a5e..170d7d7701 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoImpl.scala @@ -139,4 +139,23 @@ class VendorRepoImpl extends VendorRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate"::text""".insertReturning(using VendorRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, VendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table vendor_TEMP (like purchasing.vendor) on commit drop".execute + val copied = streamingInsert(s"""copy vendor_TEMP("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") from stdin""", batchSize, unsaved)(VendorRow.text) + val merged = sql"""insert into purchasing.vendor("businessentityid", "accountnumber", "name", "creditrating", "preferredvendorstatus", "activeflag", "purchasingwebserviceurl", "modifieddate") + select * from vendor_TEMP + on conflict ("businessentityid") + do update set + "accountnumber" = EXCLUDED."accountnumber", + "name" = EXCLUDED."name", + "creditrating" = EXCLUDED."creditrating", + "preferredvendorstatus" = EXCLUDED."preferredvendorstatus", + "activeflag" = EXCLUDED."activeflag", + "purchasingwebserviceurl" = EXCLUDED."purchasingwebserviceurl", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table vendor_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala index 1d744413b3..f34c631b9f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/purchasing/vendor/VendorRepoMock.scala @@ -105,4 +105,13 @@ class VendorRepoMock(toRow: Function1[VendorRowUnsaved, VendorRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, VendorRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala index 1b00302e48..e37bcc9730 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepo.scala @@ -32,4 +32,7 @@ trait CountryregioncurrencyRepo { def update: UpdateBuilder[CountryregioncurrencyFields, CountryregioncurrencyRow] def update(row: CountryregioncurrencyRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CountryregioncurrencyRow): ZIO[ZConnection, Throwable, UpdateResult[CountryregioncurrencyRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregioncurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala index 115741d379..e86e7c213f 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoImpl.scala @@ -121,4 +121,17 @@ class CountryregioncurrencyRepoImpl extends CountryregioncurrencyRepo { "modifieddate" = EXCLUDED."modifieddate" returning "countryregioncode", "currencycode", "modifieddate"::text""".insertReturning(using CountryregioncurrencyRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregioncurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table countryregioncurrency_TEMP (like sales.countryregioncurrency) on commit drop".execute + val copied = streamingInsert(s"""copy countryregioncurrency_TEMP("countryregioncode", "currencycode", "modifieddate") from stdin""", batchSize, unsaved)(CountryregioncurrencyRow.text) + val merged = sql"""insert into sales.countryregioncurrency("countryregioncode", "currencycode", "modifieddate") + select * from countryregioncurrency_TEMP + on conflict ("countryregioncode", "currencycode") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table countryregioncurrency_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala index 6cdc2736fe..a7c3a291aa 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/countryregioncurrency/CountryregioncurrencyRepoMock.scala @@ -104,4 +104,13 @@ class CountryregioncurrencyRepoMock(toRow: Function1[CountryregioncurrencyRowUns UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CountryregioncurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala index c8ec9706ea..17ea390716 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepo.scala @@ -34,4 +34,7 @@ trait CreditcardRepo { def update: UpdateBuilder[CreditcardFields, CreditcardRow] def update(row: CreditcardRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CreditcardRow): ZIO[ZConnection, Throwable, UpdateResult[CreditcardRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala index 8aec1fe2ae..12bff0a192 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoImpl.scala @@ -126,4 +126,21 @@ class CreditcardRepoImpl extends CreditcardRepo { "modifieddate" = EXCLUDED."modifieddate" returning "creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate"::text""".insertReturning(using CreditcardRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table creditcard_TEMP (like sales.creditcard) on commit drop".execute + val copied = streamingInsert(s"""copy creditcard_TEMP("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") from stdin""", batchSize, unsaved)(CreditcardRow.text) + val merged = sql"""insert into sales.creditcard("creditcardid", "cardtype", "cardnumber", "expmonth", "expyear", "modifieddate") + select * from creditcard_TEMP + on conflict ("creditcardid") + do update set + "cardtype" = EXCLUDED."cardtype", + "cardnumber" = EXCLUDED."cardnumber", + "expmonth" = EXCLUDED."expmonth", + "expyear" = EXCLUDED."expyear", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table creditcard_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala index 291a41d1f9..b646d6f38a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/creditcard/CreditcardRepoMock.scala @@ -106,4 +106,13 @@ class CreditcardRepoMock(toRow: Function1[CreditcardRowUnsaved, CreditcardRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.creditcardid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala index debac359be..bdcbfb7d20 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepo.scala @@ -32,4 +32,7 @@ trait CurrencyRepo { def update: UpdateBuilder[CurrencyFields, CurrencyRow] def update(row: CurrencyRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CurrencyRow): ZIO[ZConnection, Throwable, UpdateResult[CurrencyRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala index 9249342c24..8c7538da1c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoImpl.scala @@ -108,4 +108,18 @@ class CurrencyRepoImpl extends CurrencyRepo { "modifieddate" = EXCLUDED."modifieddate" returning "currencycode", "name", "modifieddate"::text""".insertReturning(using CurrencyRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table currency_TEMP (like sales.currency) on commit drop".execute + val copied = streamingInsert(s"""copy currency_TEMP("currencycode", "name", "modifieddate") from stdin""", batchSize, unsaved)(CurrencyRow.text) + val merged = sql"""insert into sales.currency("currencycode", "name", "modifieddate") + select * from currency_TEMP + on conflict ("currencycode") + do update set + "name" = EXCLUDED."name", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currency_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala index 1ad286085d..9424f1700a 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currency/CurrencyRepoMock.scala @@ -104,4 +104,13 @@ class CurrencyRepoMock(toRow: Function1[CurrencyRowUnsaved, CurrencyRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.currencycode -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala index 124c5481b6..c5b9b66b85 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepo.scala @@ -32,4 +32,7 @@ trait CurrencyrateRepo { def update: UpdateBuilder[CurrencyrateFields, CurrencyrateRow] def update(row: CurrencyrateRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CurrencyrateRow): ZIO[ZConnection, Throwable, UpdateResult[CurrencyrateRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala index ffbc50a108..c1c91a89a5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoImpl.scala @@ -128,4 +128,22 @@ class CurrencyrateRepoImpl extends CurrencyrateRepo { "modifieddate" = EXCLUDED."modifieddate" returning "currencyrateid", "currencyratedate"::text, "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate"::text""".insertReturning(using CurrencyrateRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table currencyrate_TEMP (like sales.currencyrate) on commit drop".execute + val copied = streamingInsert(s"""copy currencyrate_TEMP("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") from stdin""", batchSize, unsaved)(CurrencyrateRow.text) + val merged = sql"""insert into sales.currencyrate("currencyrateid", "currencyratedate", "fromcurrencycode", "tocurrencycode", "averagerate", "endofdayrate", "modifieddate") + select * from currencyrate_TEMP + on conflict ("currencyrateid") + do update set + "currencyratedate" = EXCLUDED."currencyratedate", + "fromcurrencycode" = EXCLUDED."fromcurrencycode", + "tocurrencycode" = EXCLUDED."tocurrencycode", + "averagerate" = EXCLUDED."averagerate", + "endofdayrate" = EXCLUDED."endofdayrate", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table currencyrate_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala index fbb47db580..3917a4ba20 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/currencyrate/CurrencyrateRepoMock.scala @@ -104,4 +104,13 @@ class CurrencyrateRepoMock(toRow: Function1[CurrencyrateRowUnsaved, Currencyrate UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CurrencyrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.currencyrateid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala index 709ff71e31..11a7ffa9f9 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepo.scala @@ -32,4 +32,7 @@ trait CustomerRepo { def update: UpdateBuilder[CustomerFields, CustomerRow] def update(row: CustomerRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: CustomerRow): ZIO[ZConnection, Throwable, UpdateResult[CustomerRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CustomerRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala index 3c5be1cab5..8c6ac3c9f0 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoImpl.scala @@ -129,4 +129,21 @@ class CustomerRepoImpl extends CustomerRepo { "modifieddate" = EXCLUDED."modifieddate" returning "customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate"::text""".insertReturning(using CustomerRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CustomerRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table customer_TEMP (like sales.customer) on commit drop".execute + val copied = streamingInsert(s"""copy customer_TEMP("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(CustomerRow.text) + val merged = sql"""insert into sales.customer("customerid", "personid", "storeid", "territoryid", "rowguid", "modifieddate") + select * from customer_TEMP + on conflict ("customerid") + do update set + "personid" = EXCLUDED."personid", + "storeid" = EXCLUDED."storeid", + "territoryid" = EXCLUDED."territoryid", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table customer_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala index 9381b34951..076868052b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/customer/CustomerRepoMock.scala @@ -104,4 +104,13 @@ class CustomerRepoMock(toRow: Function1[CustomerRowUnsaved, CustomerRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, CustomerRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.customerid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala index 71c56ddf07..4176a31b86 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepo.scala @@ -34,4 +34,7 @@ trait PersoncreditcardRepo { def update: UpdateBuilder[PersoncreditcardFields, PersoncreditcardRow] def update(row: PersoncreditcardRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: PersoncreditcardRow): ZIO[ZConnection, Throwable, UpdateResult[PersoncreditcardRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersoncreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala index 172b8a34f9..cd81bfa6fa 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoImpl.scala @@ -122,4 +122,17 @@ class PersoncreditcardRepoImpl extends PersoncreditcardRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "creditcardid", "modifieddate"::text""".insertReturning(using PersoncreditcardRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersoncreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table personcreditcard_TEMP (like sales.personcreditcard) on commit drop".execute + val copied = streamingInsert(s"""copy personcreditcard_TEMP("businessentityid", "creditcardid", "modifieddate") from stdin""", batchSize, unsaved)(PersoncreditcardRow.text) + val merged = sql"""insert into sales.personcreditcard("businessentityid", "creditcardid", "modifieddate") + select * from personcreditcard_TEMP + on conflict ("businessentityid", "creditcardid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table personcreditcard_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala index 1f97ec8e5d..11aa812aa2 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/personcreditcard/PersoncreditcardRepoMock.scala @@ -106,4 +106,13 @@ class PersoncreditcardRepoMock(toRow: Function1[PersoncreditcardRowUnsaved, Pers UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, PersoncreditcardRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala index fcf64e94e6..5fdc24bdb8 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepo.scala @@ -32,4 +32,7 @@ trait SalesorderdetailRepo { def update: UpdateBuilder[SalesorderdetailFields, SalesorderdetailRow] def update(row: SalesorderdetailRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesorderdetailRow): ZIO[ZConnection, Throwable, UpdateResult[SalesorderdetailRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderdetailRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala index 53ac8405c5..136377185d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoImpl.scala @@ -162,4 +162,24 @@ class SalesorderdetailRepoImpl extends SalesorderdetailRepo { "modifieddate" = EXCLUDED."modifieddate" returning "salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate"::text""".insertReturning(using SalesorderdetailRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderdetailRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesorderdetail_TEMP (like sales.salesorderdetail) on commit drop".execute + val copied = streamingInsert(s"""copy salesorderdetail_TEMP("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderdetailRow.text) + val merged = sql"""insert into sales.salesorderdetail("salesorderid", "salesorderdetailid", "carriertrackingnumber", "orderqty", "productid", "specialofferid", "unitprice", "unitpricediscount", "rowguid", "modifieddate") + select * from salesorderdetail_TEMP + on conflict ("salesorderid", "salesorderdetailid") + do update set + "carriertrackingnumber" = EXCLUDED."carriertrackingnumber", + "orderqty" = EXCLUDED."orderqty", + "productid" = EXCLUDED."productid", + "specialofferid" = EXCLUDED."specialofferid", + "unitprice" = EXCLUDED."unitprice", + "unitpricediscount" = EXCLUDED."unitpricediscount", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderdetail_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala index f3c464945b..9c0332c8d7 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderdetail/SalesorderdetailRepoMock.scala @@ -104,4 +104,13 @@ class SalesorderdetailRepoMock(toRow: Function1[SalesorderdetailRowUnsaved, Sale UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderdetailRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala index b4a0ba40fb..0b4b8057c6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepo.scala @@ -32,4 +32,7 @@ trait SalesorderheaderRepo { def update: UpdateBuilder[SalesorderheaderFields, SalesorderheaderRow] def update(row: SalesorderheaderRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesorderheaderRow): ZIO[ZConnection, Throwable, UpdateResult[SalesorderheaderRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala index 67f2d5b14a..da802bde64 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoImpl.scala @@ -235,4 +235,40 @@ class SalesorderheaderRepoImpl extends SalesorderheaderRepo { "modifieddate" = EXCLUDED."modifieddate" returning "salesorderid", "revisionnumber", "orderdate"::text, "duedate"::text, "shipdate"::text, "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate"::text""".insertReturning(using SalesorderheaderRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesorderheader_TEMP (like sales.salesorderheader) on commit drop".execute + val copied = streamingInsert(s"""copy salesorderheader_TEMP("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderheaderRow.text) + val merged = sql"""insert into sales.salesorderheader("salesorderid", "revisionnumber", "orderdate", "duedate", "shipdate", "status", "onlineorderflag", "purchaseordernumber", "accountnumber", "customerid", "salespersonid", "territoryid", "billtoaddressid", "shiptoaddressid", "shipmethodid", "creditcardid", "creditcardapprovalcode", "currencyrateid", "subtotal", "taxamt", "freight", "totaldue", "comment", "rowguid", "modifieddate") + select * from salesorderheader_TEMP + on conflict ("salesorderid") + do update set + "revisionnumber" = EXCLUDED."revisionnumber", + "orderdate" = EXCLUDED."orderdate", + "duedate" = EXCLUDED."duedate", + "shipdate" = EXCLUDED."shipdate", + "status" = EXCLUDED."status", + "onlineorderflag" = EXCLUDED."onlineorderflag", + "purchaseordernumber" = EXCLUDED."purchaseordernumber", + "accountnumber" = EXCLUDED."accountnumber", + "customerid" = EXCLUDED."customerid", + "salespersonid" = EXCLUDED."salespersonid", + "territoryid" = EXCLUDED."territoryid", + "billtoaddressid" = EXCLUDED."billtoaddressid", + "shiptoaddressid" = EXCLUDED."shiptoaddressid", + "shipmethodid" = EXCLUDED."shipmethodid", + "creditcardid" = EXCLUDED."creditcardid", + "creditcardapprovalcode" = EXCLUDED."creditcardapprovalcode", + "currencyrateid" = EXCLUDED."currencyrateid", + "subtotal" = EXCLUDED."subtotal", + "taxamt" = EXCLUDED."taxamt", + "freight" = 
EXCLUDED."freight", + "totaldue" = EXCLUDED."totaldue", + "comment" = EXCLUDED."comment", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheader_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala index ac01ebbca0..78c2984205 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheader/SalesorderheaderRepoMock.scala @@ -104,4 +104,13 @@ class SalesorderheaderRepoMock(toRow: Function1[SalesorderheaderRowUnsaved, Sale UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheaderRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.salesorderid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala index 0726ebe73e..32536d2d51 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepo.scala @@ -32,4 +32,7 @@ trait SalesorderheadersalesreasonRepo { def update: UpdateBuilder[SalesorderheadersalesreasonFields, SalesorderheadersalesreasonRow] def update(row: SalesorderheadersalesreasonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesorderheadersalesreasonRow): ZIO[ZConnection, Throwable, UpdateResult[SalesorderheadersalesreasonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala index 49e765c06f..6ccef607af 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoImpl.scala @@ -121,4 +121,17 @@ class SalesorderheadersalesreasonRepoImpl extends SalesorderheadersalesreasonRep "modifieddate" = EXCLUDED."modifieddate" returning "salesorderid", "salesreasonid", "modifieddate"::text""".insertReturning(using SalesorderheadersalesreasonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesorderheadersalesreason_TEMP (like sales.salesorderheadersalesreason) on commit drop".execute + val copied = streamingInsert(s"""copy salesorderheadersalesreason_TEMP("salesorderid", "salesreasonid", "modifieddate") from stdin""", batchSize, unsaved)(SalesorderheadersalesreasonRow.text) + val merged = sql"""insert into sales.salesorderheadersalesreason("salesorderid", "salesreasonid", "modifieddate") + select * from salesorderheadersalesreason_TEMP + on conflict ("salesorderid", "salesreasonid") + do update set + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesorderheadersalesreason_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala index d3eed73955..80540c704c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesorderheadersalesreason/SalesorderheadersalesreasonRepoMock.scala @@ -104,4 +104,13 @@ class SalesorderheadersalesreasonRepoMock(toRow: Function1[Salesorderheadersales UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesorderheadersalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala index c4aa0d1150..8befdd62ba 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepo.scala @@ -33,4 +33,7 @@ trait SalespersonRepo { def update: UpdateBuilder[SalespersonFields, SalespersonRow] def update(row: SalespersonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalespersonRow): ZIO[ZConnection, Throwable, UpdateResult[SalespersonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala index 8d50e89764..ada8243561 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoImpl.scala @@ -150,4 +150,24 @@ class SalespersonRepoImpl extends SalespersonRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate"::text""".insertReturning(using SalespersonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesperson_TEMP (like sales.salesperson) on commit drop".execute + val copied = streamingInsert(s"""copy salesperson_TEMP("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalespersonRow.text) + val merged = sql"""insert into sales.salesperson("businessentityid", "territoryid", "salesquota", "bonus", "commissionpct", "salesytd", "saleslastyear", "rowguid", "modifieddate") + select * from salesperson_TEMP + on conflict ("businessentityid") + do update set + "territoryid" = EXCLUDED."territoryid", + "salesquota" = EXCLUDED."salesquota", + "bonus" = EXCLUDED."bonus", + "commissionpct" = EXCLUDED."commissionpct", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesperson_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala index f7c3cf1da6..2e5f55c4fc 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesperson/SalespersonRepoMock.scala @@ -105,4 +105,13 @@ class SalespersonRepoMock(toRow: Function1[SalespersonRowUnsaved, SalespersonRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala index 19ec5ee403..c4595a39f5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepo.scala @@ -32,4 +32,7 @@ trait SalespersonquotahistoryRepo { def update: UpdateBuilder[SalespersonquotahistoryFields, SalespersonquotahistoryRow] def update(row: SalespersonquotahistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalespersonquotahistoryRow): ZIO[ZConnection, Throwable, UpdateResult[SalespersonquotahistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonquotahistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala index b6c9bc46f0..b2da1492a5 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoImpl.scala @@ -133,4 +133,19 @@ class SalespersonquotahistoryRepoImpl extends SalespersonquotahistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "quotadate"::text, "salesquota", "rowguid", "modifieddate"::text""".insertReturning(using SalespersonquotahistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonquotahistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salespersonquotahistory_TEMP (like sales.salespersonquotahistory) on commit drop".execute + val copied = streamingInsert(s"""copy salespersonquotahistory_TEMP("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalespersonquotahistoryRow.text) + val merged = sql"""insert into sales.salespersonquotahistory("businessentityid", "quotadate", "salesquota", "rowguid", "modifieddate") + select * from salespersonquotahistory_TEMP + on conflict ("businessentityid", "quotadate") + do update set + "salesquota" = EXCLUDED."salesquota", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salespersonquotahistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala index 6db7161aee..b7e29ce5a7 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salespersonquotahistory/SalespersonquotahistoryRepoMock.scala @@ -104,4 +104,13 @@ class SalespersonquotahistoryRepoMock(toRow: Function1[SalespersonquotahistoryRo UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalespersonquotahistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala index 670156811e..400898fbdb 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepo.scala @@ -32,4 +32,7 @@ trait SalesreasonRepo { def update: UpdateBuilder[SalesreasonFields, SalesreasonRow] def update(row: SalesreasonRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesreasonRow): ZIO[ZConnection, Throwable, UpdateResult[SalesreasonRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala index 52076ec057..2a843f8dde 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoImpl.scala @@ -115,4 +115,19 @@ class SalesreasonRepoImpl extends SalesreasonRepo { "modifieddate" = EXCLUDED."modifieddate" returning "salesreasonid", "name", "reasontype", "modifieddate"::text""".insertReturning(using SalesreasonRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesreason_TEMP (like sales.salesreason) on commit drop".execute + val copied = streamingInsert(s"""copy salesreason_TEMP("salesreasonid", "name", "reasontype", "modifieddate") from stdin""", batchSize, unsaved)(SalesreasonRow.text) + val merged = sql"""insert into sales.salesreason("salesreasonid", "name", "reasontype", "modifieddate") + select * from salesreason_TEMP + on conflict ("salesreasonid") + do update set + "name" = EXCLUDED."name", + "reasontype" = EXCLUDED."reasontype", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesreason_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala index f22e606f0b..0017e7b026 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesreason/SalesreasonRepoMock.scala @@ -104,4 +104,13 @@ class SalesreasonRepoMock(toRow: Function1[SalesreasonRowUnsaved, SalesreasonRow UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesreasonRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.salesreasonid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala index 89c8a059f1..1f3c781ed4 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepo.scala @@ -32,4 +32,7 @@ trait SalestaxrateRepo { def update: UpdateBuilder[SalestaxrateFields, SalestaxrateRow] def update(row: SalestaxrateRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalestaxrateRow): ZIO[ZConnection, Throwable, UpdateResult[SalestaxrateRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalestaxrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala index a4a4e9c7bd..7ec348ba4e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoImpl.scala @@ -137,4 +137,22 @@ class SalestaxrateRepoImpl extends SalestaxrateRepo { "modifieddate" = EXCLUDED."modifieddate" returning "salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate"::text""".insertReturning(using SalestaxrateRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalestaxrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salestaxrate_TEMP (like sales.salestaxrate) on commit drop".execute + val copied = streamingInsert(s"""copy salestaxrate_TEMP("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalestaxrateRow.text) + val merged = sql"""insert into sales.salestaxrate("salestaxrateid", "stateprovinceid", "taxtype", "taxrate", "name", "rowguid", "modifieddate") + select * from salestaxrate_TEMP + on conflict ("salestaxrateid") + do update set + "stateprovinceid" = EXCLUDED."stateprovinceid", + "taxtype" = EXCLUDED."taxtype", + "taxrate" = EXCLUDED."taxrate", + "name" = EXCLUDED."name", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salestaxrate_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala index 2ff8b8c5a2..fbc7dc857e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salestaxrate/SalestaxrateRepoMock.scala @@ -104,4 +104,13 @@ class SalestaxrateRepoMock(toRow: Function1[SalestaxrateRowUnsaved, Salestaxrate UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalestaxrateRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.salestaxrateid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala index 10c0ccfe63..8386e8d03e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepo.scala @@ -32,4 +32,7 @@ trait SalesterritoryRepo { def update: UpdateBuilder[SalesterritoryFields, SalesterritoryRow] def update(row: SalesterritoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesterritoryRow): ZIO[ZConnection, Throwable, UpdateResult[SalesterritoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala index e7540f5d7c..931da224c3 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoImpl.scala @@ -157,4 +157,25 @@ class SalesterritoryRepoImpl extends SalesterritoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate"::text""".insertReturning(using SalesterritoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesterritory_TEMP (like sales.salesterritory) on commit drop".execute + val copied = streamingInsert(s"""copy salesterritory_TEMP("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesterritoryRow.text) + val merged = sql"""insert into sales.salesterritory("territoryid", "name", "countryregioncode", "group", "salesytd", "saleslastyear", "costytd", "costlastyear", "rowguid", "modifieddate") + select * from salesterritory_TEMP + on conflict ("territoryid") + do update set + "name" = EXCLUDED."name", + "countryregioncode" = EXCLUDED."countryregioncode", + "group" = EXCLUDED."group", + "salesytd" = EXCLUDED."salesytd", + "saleslastyear" = EXCLUDED."saleslastyear", + "costytd" = EXCLUDED."costytd", + "costlastyear" = EXCLUDED."costlastyear", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala index 43cef8c347..52c4921243 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritory/SalesterritoryRepoMock.scala @@ -104,4 +104,13 @@ class SalesterritoryRepoMock(toRow: Function1[SalesterritoryRowUnsaved, Salester UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.territoryid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala index 11e7a642ec..8026964c2d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepo.scala @@ -32,4 +32,7 @@ trait SalesterritoryhistoryRepo { def update: UpdateBuilder[SalesterritoryhistoryFields, SalesterritoryhistoryRow] def update(row: SalesterritoryhistoryRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SalesterritoryhistoryRow): ZIO[ZConnection, Throwable, UpdateResult[SalesterritoryhistoryRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala index 4fc126ce8e..6ca7bd632c 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoImpl.scala @@ -138,4 +138,19 @@ class SalesterritoryhistoryRepoImpl extends SalesterritoryhistoryRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "territoryid", "startdate"::text, "enddate"::text, "rowguid", "modifieddate"::text""".insertReturning(using SalesterritoryhistoryRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table salesterritoryhistory_TEMP (like sales.salesterritoryhistory) on commit drop".execute + val copied = streamingInsert(s"""copy salesterritoryhistory_TEMP("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SalesterritoryhistoryRow.text) + val merged = sql"""insert into sales.salesterritoryhistory("businessentityid", "territoryid", "startdate", "enddate", "rowguid", "modifieddate") + select * from salesterritoryhistory_TEMP + on conflict ("businessentityid", "startdate", "territoryid") + do update set + "enddate" = EXCLUDED."enddate", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table salesterritoryhistory_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala index d8b9113f06..1f5d7520dd 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/salesterritoryhistory/SalesterritoryhistoryRepoMock.scala @@ -104,4 +104,13 @@ class SalesterritoryhistoryRepoMock(toRow: Function1[SalesterritoryhistoryRowUns UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SalesterritoryhistoryRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala index fa18348b75..aae757ab3d 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepo.scala @@ -32,4 +32,7 @@ trait ShoppingcartitemRepo { def update: UpdateBuilder[ShoppingcartitemFields, ShoppingcartitemRow] def update(row: ShoppingcartitemRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: ShoppingcartitemRow): ZIO[ZConnection, Throwable, UpdateResult[ShoppingcartitemRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShoppingcartitemRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala index 13d8b9ddf2..95b39e32b3 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoImpl.scala @@ -130,4 +130,21 @@ class ShoppingcartitemRepoImpl extends ShoppingcartitemRepo { "modifieddate" = EXCLUDED."modifieddate" returning "shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated"::text, "modifieddate"::text""".insertReturning(using ShoppingcartitemRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShoppingcartitemRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table shoppingcartitem_TEMP (like sales.shoppingcartitem) on commit drop".execute + val copied = streamingInsert(s"""copy shoppingcartitem_TEMP("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") from stdin""", batchSize, unsaved)(ShoppingcartitemRow.text) + val merged = sql"""insert into sales.shoppingcartitem("shoppingcartitemid", "shoppingcartid", "quantity", "productid", "datecreated", "modifieddate") + select * from shoppingcartitem_TEMP + on conflict ("shoppingcartitemid") + do update set + "shoppingcartid" = EXCLUDED."shoppingcartid", + "quantity" = EXCLUDED."quantity", + "productid" = EXCLUDED."productid", + "datecreated" = EXCLUDED."datecreated", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table shoppingcartitem_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala index a33d37745f..f23e815f7e 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/shoppingcartitem/ShoppingcartitemRepoMock.scala @@ -104,4 +104,13 @@ class ShoppingcartitemRepoMock(toRow: Function1[ShoppingcartitemRowUnsaved, Shop UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, ShoppingcartitemRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.shoppingcartitemid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala index 00d3b8c63c..214ab73d9b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepo.scala @@ -32,4 +32,7 @@ trait SpecialofferRepo { def update: UpdateBuilder[SpecialofferFields, SpecialofferRow] def update(row: SpecialofferRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SpecialofferRow): ZIO[ZConnection, Throwable, UpdateResult[SpecialofferRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala index 55399e98bf..8e855342a0 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoImpl.scala @@ -153,4 +153,26 @@ class SpecialofferRepoImpl extends SpecialofferRepo { "modifieddate" = EXCLUDED."modifieddate" returning "specialofferid", "description", "discountpct", "type", "category", "startdate"::text, "enddate"::text, "minqty", "maxqty", "rowguid", "modifieddate"::text""".insertReturning(using SpecialofferRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table specialoffer_TEMP (like sales.specialoffer) on commit drop".execute + val copied = streamingInsert(s"""copy specialoffer_TEMP("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SpecialofferRow.text) + val merged = sql"""insert into sales.specialoffer("specialofferid", "description", "discountpct", "type", "category", "startdate", "enddate", "minqty", "maxqty", "rowguid", "modifieddate") + select * from specialoffer_TEMP + on conflict ("specialofferid") + do update set + "description" = EXCLUDED."description", + "discountpct" = EXCLUDED."discountpct", + "type" = EXCLUDED."type", + "category" = EXCLUDED."category", + "startdate" = EXCLUDED."startdate", + "enddate" = EXCLUDED."enddate", + "minqty" = EXCLUDED."minqty", + "maxqty" = EXCLUDED."maxqty", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialoffer_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala index 3977a2f2ed..72ee503d8b 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialoffer/SpecialofferRepoMock.scala @@ -104,4 +104,13 @@ class SpecialofferRepoMock(toRow: Function1[SpecialofferRowUnsaved, Specialoffer UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.specialofferid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala index 816c9302df..e871aa2268 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepo.scala @@ -32,4 +32,7 @@ trait SpecialofferproductRepo { def update: UpdateBuilder[SpecialofferproductFields, SpecialofferproductRow] def update(row: SpecialofferproductRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: SpecialofferproductRow): ZIO[ZConnection, Throwable, UpdateResult[SpecialofferproductRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferproductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala index e6ea351d9f..07b54ae9d3 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoImpl.scala @@ -129,4 +129,18 @@ class SpecialofferproductRepoImpl extends SpecialofferproductRepo { "modifieddate" = EXCLUDED."modifieddate" returning "specialofferid", "productid", "rowguid", "modifieddate"::text""".insertReturning(using SpecialofferproductRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferproductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table specialofferproduct_TEMP (like sales.specialofferproduct) on commit drop".execute + val copied = streamingInsert(s"""copy specialofferproduct_TEMP("specialofferid", "productid", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(SpecialofferproductRow.text) + val merged = sql"""insert into sales.specialofferproduct("specialofferid", "productid", "rowguid", "modifieddate") + select * from specialofferproduct_TEMP + on conflict ("specialofferid", "productid") + do update set + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table specialofferproduct_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala index 8f1c036d53..e0398a13f6 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/specialofferproduct/SpecialofferproductRepoMock.scala @@ -104,4 +104,13 @@ class SpecialofferproductRepoMock(toRow: Function1[SpecialofferproductRowUnsaved UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, SpecialofferproductRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.compositeId -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala index 1585505442..36ec748ee0 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepo.scala @@ -33,4 +33,7 @@ trait StoreRepo { def update: UpdateBuilder[StoreFields, StoreRow] def update(row: StoreRow): ZIO[ZConnection, Throwable, Boolean] def upsert(unsaved: StoreRow): ZIO[ZConnection, Throwable, UpdateResult[StoreRow]] + // Not implementable for zio-jdbc: upsertBatch + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StoreRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala index f6929c1341..ee54b94402 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoImpl.scala @@ -127,4 +127,21 @@ class StoreRepoImpl extends StoreRepo { "modifieddate" = EXCLUDED."modifieddate" returning "businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate"::text""".insertReturning(using StoreRow.jdbcDecoder) } + /* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StoreRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + val created = sql"create temporary table store_TEMP (like sales.store) on commit drop".execute + val copied = streamingInsert(s"""copy store_TEMP("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") from stdin""", batchSize, unsaved)(StoreRow.text) + val merged = sql"""insert into sales.store("businessentityid", "name", "salespersonid", "demographics", "rowguid", "modifieddate") + select * from store_TEMP + on conflict ("businessentityid") + do update set + "name" = EXCLUDED."name", + "salespersonid" = EXCLUDED."salespersonid", + "demographics" = EXCLUDED."demographics", + "rowguid" = EXCLUDED."rowguid", + "modifieddate" = EXCLUDED."modifieddate" + ; + drop table store_TEMP;""".update + created *> copied *> merged + } } diff --git a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala index edd8c79fe2..a150d4e541 100644 --- a/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala +++ b/typo-tester-zio-jdbc/generated-and-checked-in/adventureworks/sales/store/StoreRepoMock.scala @@ -105,4 +105,13 @@ class StoreRepoMock(toRow: Function1[StoreRowUnsaved, StoreRow], UpdateResult(1, Chunk.single(unsaved)) } } + /* NOTE: this functionality is not safe if you use auto-commit mode! it runs 3 SQL statements */ + override def upsertStreaming(unsaved: ZStream[ZConnection, Throwable, StoreRow], batchSize: Int = 10000): ZIO[ZConnection, Throwable, Long] = { + unsaved.scanZIO(0L) { case (acc, row) => + ZIO.succeed { + map += (row.businessentityid -> row) + acc + 1 + } + }.runLast.map(_.getOrElse(0L)) + } } diff --git a/typo-tester-zio-jdbc/src/scala/adventureworks/production/product/RepoTest.scala b/typo-tester-zio-jdbc/src/scala/adventureworks/production/product/RepoTest.scala new file mode 100644 index 0000000000..4f6e0d290a --- /dev/null +++ b/typo-tester-zio-jdbc/src/scala/adventureworks/production/product/RepoTest.scala @@ -0,0 +1,33 @@ +package adventureworks.production.product + +import adventureworks.customtypes.* +import adventureworks.production.unitmeasure.* +import adventureworks.public.Name +import adventureworks.{SnapshotTest, withConnection} +import org.scalatest.Assertion +import zio.Chunk +import zio.stream.ZStream + +class RepoTest extends SnapshotTest { + def runTest(unitmeasureRepo: UnitmeasureRepo): Assertion = + withConnection { + val um1 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg1"), name = Name("name1"), TypoLocalDateTime.now) + val um2 = UnitmeasureRow(unitmeasurecode = UnitmeasureId("kg2"), name = Name("name2"), TypoLocalDateTime.now) + for { + _ <- unitmeasureRepo.upsertStreaming(ZStream(um1, um2)) + _ <- unitmeasureRepo.selectAll.runCollect.map(all => assert(List(um1, um2) == all.sortBy(_.name))) + um1a = um1.copy(name = Name("name1a")) + um2a = um2.copy(name = Name("name2a")) + _ <- unitmeasureRepo.upsertStreaming(ZStream(um1a, um2a)) + all <- unitmeasureRepo.selectAll.runCollect + } yield assert(Chunk(um1a, um2a) == all.sortBy(_.name)) + } + + test("in-memory") { + runTest(new UnitmeasureRepoMock(_.toRow(TypoLocalDateTime.now))) + } + + test("pg") { + runTest(new UnitmeasureRepoImpl) + } +} diff --git a/typo/src/scala/typo/TypesScala.scala b/typo/src/scala/typo/TypesScala.scala index 
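(Editorial aside, not part of the patch.) The RepoTest above exercises the new zio-jdbc `upsertStreaming` through both the in-memory mock and the Postgres-backed repo. For orientation, a minimal caller-side sketch follows; the repo class and method signature come from this patch, while `rows`, `connection`, and the batch size are illustrative assumptions. Because the implementation issues three statements (create temporary table, COPY, merge and drop), the effect must run inside a single transaction rather than in auto-commit mode, and whether that holds depends on how the supplied ZConnection layer is built.

    import adventureworks.sales.salesreason.{SalesreasonRepoImpl, SalesreasonRow}
    import zio.{ZIO, ZLayer}
    import zio.jdbc.ZConnection
    import zio.stream.ZStream

    // Hedged sketch: drive the generated upsertStreaming from a plain List.
    // `connection` is an assumed ZConnection layer; it must represent one open
    // transaction for the temp-table/COPY/merge sequence to be safe.
    def upsertAll(rows: List[SalesreasonRow], connection: ZLayer[Any, Throwable, ZConnection]): ZIO[Any, Throwable, Long] =
      new SalesreasonRepoImpl()
        .upsertStreaming(ZStream.fromIterable(rows), batchSize = 1000)
        .provideLayer(connection)
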
8d2571cbc9..3f19085984 100644 --- a/typo/src/scala/typo/TypesScala.scala +++ b/typo/src/scala/typo/TypesScala.scala @@ -14,6 +14,7 @@ object TypesScala { val Float = sc.Type.Qualified("scala.Float") val Function1 = sc.Type.Qualified("scala.Function1") val Int = sc.Type.Qualified("scala.Int") + val Iterable = sc.Type.Qualified("scala.collection.Iterable") val Iterator = sc.Type.Qualified("scala.collection.Iterator") val Left = sc.Type.Qualified("scala.Left") val List = sc.Type.Qualified("scala.List") diff --git a/typo/src/scala/typo/internal/ComputedTable.scala b/typo/src/scala/typo/internal/ComputedTable.scala index b30f2f338d..e356af3750 100644 --- a/typo/src/scala/typo/internal/ComputedTable.scala +++ b/typo/src/scala/typo/internal/ComputedTable.scala @@ -176,6 +176,12 @@ case class ComputedTable( RepoMethod.Insert(dbTable.name, cols, unsavedParam, names.RowName) }, if (options.enableStreamingInserts) Some(RepoMethod.InsertStreaming(dbTable.name, cols, names.RowName)) else None, + maybeId.collect { + case id if options.enableStreamingInserts => RepoMethod.UpsertStreaming(dbTable.name, cols, id, names.RowName) + }, + maybeId.collect { case id => + RepoMethod.UpsertBatch(dbTable.name, cols, id, names.RowName) + }, maybeUnsavedRow.map { unsavedRow => val unsavedParam = sc.Param(sc.Ident("unsaved"), unsavedRow.tpe, None) RepoMethod.InsertUnsaved(dbTable.name, cols, unsavedRow, unsavedParam, default, names.RowName) diff --git a/typo/src/scala/typo/internal/RepoMethod.scala b/typo/src/scala/typo/internal/RepoMethod.scala index 39288119f8..1e955d2730 100644 --- a/typo/src/scala/typo/internal/RepoMethod.scala +++ b/typo/src/scala/typo/internal/RepoMethod.scala @@ -86,6 +86,22 @@ object RepoMethod { rowType: sc.Type ) extends Mutator("upsert") + case class UpsertBatch( + relName: db.RelationName, + cols: NonEmptyList[ComputedColumn], + id: IdComputed, + rowType: sc.Type + ) extends Mutator("upsertBatch") + + case class UpsertStreaming( + relName: db.RelationName, + cols: NonEmptyList[ComputedColumn], + id: IdComputed, + rowType: sc.Type + ) extends Mutator("upsertStreaming") { + override val comment: Option[String] = Some("/* NOTE: this functionality is not safe if you use auto-commit mode! 
it runs 3 SQL statements */") + } + case class Insert( relName: db.RelationName, cols: NonEmptyList[ComputedColumn], diff --git a/typo/src/scala/typo/internal/codegen/DbLib.scala b/typo/src/scala/typo/internal/codegen/DbLib.scala index fc636e0431..4433915ddd 100644 --- a/typo/src/scala/typo/internal/codegen/DbLib.scala +++ b/typo/src/scala/typo/internal/codegen/DbLib.scala @@ -4,7 +4,7 @@ package codegen trait DbLib { def defaultedInstance: List[sc.Given] - def repoSig(repoMethod: RepoMethod): sc.Code + def repoSig(repoMethod: RepoMethod): Either[DbLib.NotImplementedFor, sc.Code] def repoImpl(repoMethod: RepoMethod): sc.Code def mockRepoImpl(id: IdComputed, repoMethod: RepoMethod, maybeToRow: Option[sc.Param]): sc.Code def testInsertMethod(x: ComputedTestInserts.InsertMethod): sc.Value @@ -17,6 +17,8 @@ trait DbLib { } object DbLib { + case class NotImplementedFor(library: String) + sealed trait RowType object RowType { case object Readable extends RowType diff --git a/typo/src/scala/typo/internal/codegen/DbLibAnorm.scala b/typo/src/scala/typo/internal/codegen/DbLibAnorm.scala index 03196afb12..405e46f5a6 100644 --- a/typo/src/scala/typo/internal/codegen/DbLibAnorm.scala +++ b/typo/src/scala/typo/internal/codegen/DbLibAnorm.scala @@ -6,6 +6,7 @@ import typo.internal.analysis.MaybeReturnsRows class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefault, enableStreamingInserts: Boolean) extends DbLib { + val BatchSql = sc.Type.Qualified("anorm.BatchSql") val Column = sc.Type.Qualified("anorm.Column") val ToStatement = sc.Type.Qualified("anorm.ToStatement") val ToSql = sc.Type.Qualified("anorm.ToSql") @@ -20,6 +21,7 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa val TypeDoesNotMatch = sc.Type.Qualified("anorm.TypeDoesNotMatch") val SimpleSql = sc.Type.Qualified("anorm.SimpleSql") val Row = sc.Type.Qualified("anorm.Row") + val managed = sc.Type.Qualified("resource.managed") def rowParserFor(rowType: sc.Type) = code"$rowType.$rowParserName(1)" @@ -35,16 +37,48 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa val arrayParameterMetaDataName = sc.Ident("arrayParameterMetaData") val textSupport: Option[DbLibTextSupport] = if (enableStreamingInserts) Some(new DbLibTextSupport(pkg, inlineImplicits, None, default)) else None + val ExecuteReturningSyntax = sc.QIdent(List[List[sc.Ident]](List(sc.Ident("anorm")), pkg.idents, List(sc.Ident("ExecuteReturningSyntax"))).flatten) - override val additionalFiles: List[typo.sc.File] = - textSupport match { - case Some(textSupport) => - List( - sc.File(textSupport.Text, DbLibTextImplementations.Text, Nil, scope = Scope.Main), - sc.File(textSupport.streamingInsert, DbLibTextImplementations.streamingInsertAnorm(textSupport.Text), Nil, scope = Scope.Main) + override val additionalFiles: List[typo.sc.File] = { + List[List[sc.File]]( + List( + sc.File( + tpe = sc.Type.Qualified(ExecuteReturningSyntax), + contents = { + // drop structured imports from anorm.*, as the auto-import thing would need to be more clever to handle this + code"""|object ${ExecuteReturningSyntax.name} { + | /* add executeReturning to anorm. 
it needs to be inside the package, because everything is hidden */ + | implicit class Ops(batchSql: BatchSql) { + | def executeReturning[T](parser: ResultSetParser[T])(implicit c: ${TypesJava.Connection}): T = + | $managed(batchSql.getFilledStatement(c, getGeneratedKeys = true))(using StatementResource, statementClassTag).acquireAndGet { ps => + | ps.executeBatch() + | Sql + | .asTry( + | parser, + | $managed(ps.getGeneratedKeys)(using ResultSetResource, resultSetClassTag), + | onFirstRow = false, + | ColumnAliaser.empty + | ) + | .get + | } + | } + |} + |""".stripMargin + }, + secondaryTypes = Nil, + scope = Scope.Main ) - case None => Nil - } + ), + textSupport match { + case Some(textSupport) => + List( + sc.File(textSupport.Text, DbLibTextImplementations.Text, Nil, scope = Scope.Main), + sc.File(textSupport.streamingInsert, DbLibTextImplementations.streamingInsertAnorm(textSupport.Text), Nil, scope = Scope.Main) + ) + case None => Nil + } + ).flatten + } def runtimeInterpolateValue(name: sc.Code, tpe: sc.Type, forbidInline: Boolean = false): sc.Code = if (inlineImplicits && !forbidInline) @@ -158,58 +192,62 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa case other => sc.Summon(ToStatement.of(other)).code } - override def repoSig(repoMethod: RepoMethod): sc.Code = { + override def repoSig(repoMethod: RepoMethod): Right[Nothing, sc.Code] = { val name = repoMethod.methodName repoMethod match { case RepoMethod.SelectBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}") case RepoMethod.SelectAll(_, _, rowType) => - code"def $name(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}" + Right(code"def $name(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}") case RepoMethod.SelectById(_, _, id, rowType) => - code"def $name(${id.param})(implicit c: ${TypesJava.Connection}): ${TypesScala.Option.of(rowType)}" + Right(code"def $name(${id.param})(implicit c: ${TypesJava.Connection}): ${TypesScala.Option.of(rowType)}") case RepoMethod.SelectByIds(_, _, idComputed, idsParam, rowType) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"toStatement$i"), ToStatement.of(sc.Type.ArrayOf(colType)), None) } val params = sc.Param(sc.Ident("c"), TypesJava.Connection, None) :: usedDefineds - code"def $name($idsParam)(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.List.of(rowType)}" + Right(code"def $name($idsParam)(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.List.of(rowType)}") case RepoMethod.SelectByIdsTracked(x) => val usedDefineds = x.idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"toStatement$i"), ToStatement.of(sc.Type.ArrayOf(colType)), None) } val params = sc.Param(sc.Ident("c"), TypesJava.Connection, None) :: usedDefineds - code"def $name(${x.idsParam})(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.Map.of(x.idComputed.tpe, x.rowType)}" + Right(code"def $name(${x.idsParam})(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.Map.of(x.idComputed.tpe, x.rowType)}") case RepoMethod.SelectByUnique(_, keyColumns, _, rowType) => - code"def $name(${keyColumns.map(_.param.code).mkCode(", ")})(implicit c: ${TypesJava.Connection}): ${TypesScala.Option.of(rowType)}" + Right(code"def $name(${keyColumns.map(_.param.code).mkCode(", ")})(implicit c: 
${TypesJava.Connection}): ${TypesScala.Option.of(rowType)}") case RepoMethod.SelectByFieldValues(_, _, _, fieldValueOrIdsParam, rowType) => - code"def $name($fieldValueOrIdsParam)(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}" + Right(code"def $name($fieldValueOrIdsParam)(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}") case RepoMethod.UpdateBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}") case RepoMethod.UpdateFieldValues(_, id, varargs, _, _, _) => - code"def $name(${id.param}, $varargs)(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}" + Right(code"def $name(${id.param}, $varargs)(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}") case RepoMethod.Update(_, _, _, param, _) => - code"def $name($param)(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}" + Right(code"def $name($param)(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}") case RepoMethod.Insert(_, _, unsavedParam, rowType) => - code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType" + Right(code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType") case RepoMethod.InsertStreaming(_, _, rowType) => - code"def $name(unsaved: ${TypesScala.Iterator.of(rowType)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Long}" + Right(code"def $name(unsaved: ${TypesScala.Iterator.of(rowType)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Long}") case RepoMethod.Upsert(_, _, _, unsavedParam, rowType) => - code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType" + Right(code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType") + case RepoMethod.UpsertBatch(_, _, _, rowType) => + Right(code"def $name(unsaved: ${TypesScala.Iterable.of(rowType)})(implicit c: ${TypesJava.Connection}): ${TypesScala.List.of(rowType)}") + case RepoMethod.UpsertStreaming(_, _, _, rowType) => + Right(code"def $name(unsaved: ${TypesScala.Iterator.of(rowType)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Int}") case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, rowType) => - code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType" + Right(code"def $name($unsavedParam)(implicit c: ${TypesJava.Connection}): $rowType") case RepoMethod.InsertUnsavedStreaming(_, unsaved) => - code"def $name(unsaved: ${TypesScala.Iterator.of(unsaved.tpe)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Long}" + Right(code"def $name(unsaved: ${TypesScala.Iterator.of(unsaved.tpe)}, batchSize: ${TypesScala.Int} = 10000)(implicit c: ${TypesJava.Connection}): ${TypesScala.Long}") case RepoMethod.DeleteBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}") case RepoMethod.Delete(_, id) => - code"def $name(${id.param})(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}" + Right(code"def $name(${id.param})(implicit c: ${TypesJava.Connection}): ${TypesScala.Boolean}") case RepoMethod.DeleteByIds(_, idComputed, idsParam) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"toStatement$i"), 
ToStatement.of(sc.Type.ArrayOf(colType)), None) } val params = sc.Param(sc.Ident("c"), TypesJava.Connection, None) :: usedDefineds - code"def $name($idsParam)(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.Int}" + Right(code"def $name($idsParam)(implicit ${params.map(_.code).mkCode(", ")}): ${TypesScala.Int}") case RepoMethod.SqlFile(sqlScript) => val params = sc.Params(sqlScript.params.map(p => sc.Param(p.name, p.tpe, None))) val retType = sqlScript.maybeRowName match { case MaybeReturnsRows.Query(rowName) => TypesScala.List.of(rowName) case MaybeReturnsRows.Update => TypesScala.Int } - code"def $name$params(implicit c: ${TypesJava.Connection}): $retType" + Right(code"def $name$params(implicit c: ${TypesJava.Connection}): $retType") } } @@ -375,9 +413,12 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa runtimeInterpolateValue(code"${unsavedParam.name}.${c.name}", c.tpe).code ++ SqlCast.toPgCode(c) } - val pickExcludedCols = cols.toList - .filterNot(c => id.cols.exists(_.name == c.name)) - .map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" } + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } val sql = SQL { code"""|insert into $relName(${dbNames(cols, isRead = false)}) @@ -385,8 +426,7 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa | ${values.mkCode(",\n")} |) |on conflict (${dbNames(id.cols, isRead = false)}) - |do update set - | ${pickExcludedCols.mkCode(",\n")} + |$conflictAction |returning ${dbNames(cols, isRead = true)} |""".stripMargin } @@ -394,6 +434,60 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa code"""|$sql | .executeInsert(${rowParserFor(rowType)}.single) |""" + case RepoMethod.UpsertBatch(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } + + val sql = sc.s { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |values (${cols.map(c => code"{${c.dbName.value}}${SqlCast.toPgCode(c)}").mkCode(", ")}) + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |returning ${dbNames(cols, isRead = true)} + |""".stripMargin + } + + code"""|def toNamedParameter(row: $rowType): ${TypesScala.List.of(NamedParameter)} = ${TypesScala.List}( + | ${cols.map(c => code"$NamedParameter(${sc.StrLit(c.dbName.value)}, $ParameterValue(row.${c.name}, null, ${lookupToStatementFor(c.tpe)}))").mkCode(",\n")} + |) + |unsaved.toList match { + | case Nil => ${TypesScala.Nil} + | case head :: rest => + | new $ExecuteReturningSyntax.Ops( + | $BatchSql( + | $sql, + | toNamedParameter(head), + | rest.map(toNamedParameter)* + | ) + | ).executeReturning(${rowParserFor(rowType)}.*) + |}""".stripMargin + + case RepoMethod.UpsertStreaming(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } + val tempTablename = 
s"${relName.name}_TEMP" + + val copySql = sc.s(code"copy $tempTablename(${dbNames(cols, isRead = false)}) from stdin") + + val mergeSql = SQL { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |select * from $tempTablename + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |; + |drop table $tempTablename;""".stripMargin + } + code"""|${SQL(code"create temporary table $tempTablename (like $relName) on commit drop")}.execute(): @${TypesScala.nowarn} + |${textSupport.get.streamingInsert}($copySql, batchSize, unsaved)(${textSupport.get.lookupTextFor(rowType)}, c): @${TypesScala.nowarn} + |$mergeSql.executeUpdate()""".stripMargin case RepoMethod.InsertUnsaved(relName, cols, unsaved, unsavedParam, default, rowType) => val cases0 = unsaved.restCols.map { col => @@ -572,6 +666,16 @@ class DbLibAnorm(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDefa case RepoMethod.Upsert(_, _, _, unsavedParam, _) => code"""|map.put(${unsavedParam.name}.${id.paramName}, ${unsavedParam.name}): @${TypesScala.nowarn} |${unsavedParam.name}""" + case RepoMethod.UpsertStreaming(_, _, id, _) => + code"""|unsaved.foreach { row => + | map += (row.${id.paramName} -> row) + |} + |unsaved.size""".stripMargin + case RepoMethod.UpsertBatch(_, _, id, _) => + code"""|unsaved.map { row => + | map += (row.${id.paramName} -> row) + | row + |}.toList""".stripMargin case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, _) => code"insert(${maybeToRow.get.name}(${unsavedParam.name}))" case RepoMethod.InsertStreaming(_, _, _) => diff --git a/typo/src/scala/typo/internal/codegen/DbLibDoobie.scala b/typo/src/scala/typo/internal/codegen/DbLibDoobie.scala index 7579232a75..2db8c6f807 100644 --- a/typo/src/scala/typo/internal/codegen/DbLibDoobie.scala +++ b/typo/src/scala/typo/internal/codegen/DbLibDoobie.scala @@ -27,6 +27,8 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef val fromWrite = sc.Type.Qualified("doobie.syntax.SqlInterpolator.SingleFragment.fromWrite") val FragmentOps = sc.Type.Qualified("doobie.postgres.syntax.FragmentOps") val JdbcType = sc.Type.Qualified("doobie.enumerated.JdbcType") + val Update = sc.Type.Qualified("doobie.util.update.Update") + val catsStdInstancesForList = sc.Type.Qualified("cats.instances.list.catsStdInstancesForList") val arrayGetName: sc.Ident = sc.Ident("arrayGet") val arrayPutName: sc.Ident = sc.Ident("arrayPut") @@ -61,64 +63,68 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef code"${composite.cols.map(cc => code"${cc.dbName.code} = ${runtimeInterpolateValue(code"${composite.paramName}.${cc.name}", cc.tpe)}").mkCode(" AND ")}" } - override def repoSig(repoMethod: RepoMethod): sc.Code = { + override def repoSig(repoMethod: RepoMethod): Right[Nothing, sc.Code] = { val name = repoMethod.methodName repoMethod match { case RepoMethod.SelectBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}") case RepoMethod.SelectAll(_, _, rowType) => - code"def $name: ${fs2Stream.of(ConnectionIO, rowType)}" + Right(code"def $name: ${fs2Stream.of(ConnectionIO, rowType)}") case RepoMethod.SelectById(_, _, id, rowType) => - code"def $name(${id.param}): ${ConnectionIO.of(TypesScala.Option.of(rowType))}" + Right(code"def $name(${id.param}): ${ConnectionIO.of(TypesScala.Option.of(rowType))}") case RepoMethod.SelectByIds(_, _, idComputed, idsParam, rowType) => val 
usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"puts$i"), Put.of(sc.Type.ArrayOf(colType)), None) } usedDefineds match { case Nil => - code"def $name($idsParam): ${fs2Stream.of(ConnectionIO, rowType)}" + Right(code"def $name($idsParam): ${fs2Stream.of(ConnectionIO, rowType)}") case nonEmpty => - code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${fs2Stream.of(ConnectionIO, rowType)}" + Right(code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${fs2Stream.of(ConnectionIO, rowType)}") } case RepoMethod.SelectByIdsTracked(x) => val usedDefineds = x.idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"puts$i"), Put.of(sc.Type.ArrayOf(colType)), None) } val returnType = ConnectionIO.of(TypesScala.Map.of(x.idComputed.tpe, x.rowType)) usedDefineds match { case Nil => - code"def $name(${x.idsParam}): $returnType" + Right(code"def $name(${x.idsParam}): $returnType") case nonEmpty => - code"def $name(${x.idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): $returnType" + Right(code"def $name(${x.idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): $returnType") } case RepoMethod.SelectByUnique(_, keyColumns, _, rowType) => - code"def $name(${keyColumns.map(_.param.code).mkCode(", ")}): ${ConnectionIO.of(TypesScala.Option.of(rowType))}" + Right(code"def $name(${keyColumns.map(_.param.code).mkCode(", ")}): ${ConnectionIO.of(TypesScala.Option.of(rowType))}") case RepoMethod.SelectByFieldValues(_, _, _, fieldValueOrIdsParam, rowType) => - code"def $name($fieldValueOrIdsParam): ${fs2Stream.of(ConnectionIO, rowType)}" + Right(code"def $name($fieldValueOrIdsParam): ${fs2Stream.of(ConnectionIO, rowType)}") case RepoMethod.UpdateBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}") case RepoMethod.UpdateFieldValues(_, id, varargs, _, _, _) => - code"def $name(${id.param}, $varargs): ${ConnectionIO.of(TypesScala.Boolean)}" + Right(code"def $name(${id.param}, $varargs): ${ConnectionIO.of(TypesScala.Boolean)}") case RepoMethod.Update(_, _, _, param, _) => - code"def $name($param): ${ConnectionIO.of(TypesScala.Boolean)}" + Right(code"def $name($param): ${ConnectionIO.of(TypesScala.Boolean)}") case RepoMethod.Insert(_, _, unsavedParam, rowType) => - code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}" + Right(code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}") case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, rowType) => - code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}" + Right(code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}") case RepoMethod.InsertStreaming(_, _, rowType) => - code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, rowType)}, batchSize: ${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Long)}" + Right(code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, rowType)}, batchSize: ${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Long)}") + case RepoMethod.UpsertBatch(_, _, _, rowType) => + Right(code"def $name(unsaved: ${TypesScala.List.of(rowType)}): ${fs2Stream.of(ConnectionIO, rowType)}") case RepoMethod.InsertUnsavedStreaming(_, unsaved) => - code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, unsaved.tpe)}, batchSize: ${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Long)}" + Right(code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, unsaved.tpe)}, 
batchSize: ${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Long)}") case RepoMethod.Upsert(_, _, _, unsavedParam, rowType) => - code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}" + Right(code"def $name($unsavedParam): ${ConnectionIO.of(rowType)}") + case RepoMethod.UpsertStreaming(_, _, _, rowType) => + Right(code"def $name(unsaved: ${fs2Stream.of(ConnectionIO, rowType)}, batchSize: ${TypesScala.Int} = 10000): ${ConnectionIO.of(TypesScala.Int)}") case RepoMethod.DeleteBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}") case RepoMethod.Delete(_, id) => - code"def $name(${id.param}): ${ConnectionIO.of(TypesScala.Boolean)}" + Right(code"def $name(${id.param}): ${ConnectionIO.of(TypesScala.Boolean)}") case RepoMethod.DeleteByIds(_, idComputed, idsParam) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"put$i"), Put.of(sc.Type.ArrayOf(colType)), None) } usedDefineds match { case Nil => - code"def $name(${idsParam}): ${ConnectionIO.of(TypesScala.Int)}" + Right(code"def $name($idsParam): ${ConnectionIO.of(TypesScala.Int)}") case nonEmpty => - code"def $name(${idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ConnectionIO.of(TypesScala.Int)}" + Right(code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ConnectionIO.of(TypesScala.Int)}") } case RepoMethod.SqlFile(sqlScript) => @@ -129,7 +135,7 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef case MaybeReturnsRows.Update => ConnectionIO.of(TypesScala.Int) } - code"def $name$params: $retType" + Right(code"def $name$params: $retType") } } @@ -286,6 +292,7 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef if (fixVerySlowImplicit) code"new $FragmentOps($sql).copyIn(unsaved, batchSize)(using ${textSupport.get.lookupTextFor(rowType)})" else code"new $FragmentOps($sql).copyIn[$rowType](unsaved, batchSize)" + case RepoMethod.InsertUnsavedStreaming(relName, unsaved) => val sql = SQL(code"COPY $relName(${dbNames(unsaved.allCols, isRead = false)}) FROM STDIN (DEFAULT '${textSupport.get.DefaultValue}')") @@ -297,24 +304,81 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef code"${runtimeInterpolateValue(code"${unsavedParam.name}.${c.name}", c.tpe)}${SqlCast.toPgCode(c)}" } - val pickExcludedCols = cols.toList - .filterNot(c => id.cols.exists(_.name == c.name)) - .map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" } - + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } val sql = SQL { code"""|insert into $relName(${dbNames(cols, isRead = false)}) |values ( | ${values.mkCode(",\n")} |) |on conflict (${dbNames(id.cols, isRead = false)}) - |do update set - | ${pickExcludedCols.mkCode(",\n")} + |$conflictAction |returning ${dbNames(cols, isRead = true)} |""".stripMargin } code"${query(sql, rowType)}.unique" + case RepoMethod.UpsertBatch(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = 
EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } + + val sql = sc.s { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |values (${cols.map(c => code"?${SqlCast.toPgCode(c)}").mkCode(code",")}) + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |returning ${dbNames(cols, isRead = true)}""".stripMargin + } + + if (fixVerySlowImplicit) + code"""|${Update.of(rowType)}( + | $sql + |)(using $rowType.$writeName) + |.updateManyWithGeneratedKeys[$rowType](${dbNames(cols, isRead = false)})(unsaved)(using $catsStdInstancesForList, $rowType.$readName)""".stripMargin + else + code"""|${Update.of(rowType)}( + | $sql + |).updateManyWithGeneratedKeys[$rowType](${dbNames(cols, isRead = false)})(unsaved)""" + + case RepoMethod.UpsertStreaming(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } + val tempTablename = s"${relName.name}_TEMP" + + val streamingInsert = { + val sql = SQL(code"copy $tempTablename(${dbNames(cols, isRead = false)}) from stdin") + if (fixVerySlowImplicit) code"new $FragmentOps($sql).copyIn(unsaved, batchSize)(using ${textSupport.get.lookupTextFor(rowType)})" + else code"new $FragmentOps($sql).copyIn[$rowType](unsaved, batchSize)" + } + + val mergeSql = SQL { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |select * from $tempTablename + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |; + |drop table $tempTablename;""".stripMargin + } + + code"""|for { + | _ <- ${SQL(code"create temporary table $tempTablename (like $relName) on commit drop")}.update.run + | _ <- $streamingInsert + | res <- $mergeSql.update.run + |} yield res""".stripMargin + case RepoMethod.Insert(relName, cols, unsavedParam, rowType) => val values = cols.map { c => code"${runtimeInterpolateValue(code"${unsavedParam.name}.${c.name}", c.tpe)}${SqlCast.toPgCode(c)}" @@ -405,8 +469,7 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef | ${x.idsParam.name}.view.flatMap(id => byId.get(id).map(x => (id, x))).toMap |}""".stripMargin case RepoMethod.SelectByUnique(_, keyColumns, _, _) => - code"${delayCIO}(map.values.find(v => ${keyColumns.map(c => code"${c.name} == v.${c.name}").mkCode(" && ")}))" - + code"$delayCIO(map.values.find(v => ${keyColumns.map(c => code"${c.name} == v.${c.name}").mkCode(" && ")}))" case RepoMethod.SelectByFieldValues(_, cols, fieldValue, fieldValueOrIdsParam, _) => val cases = cols.map { col => code"case (acc, $fieldValue.${col.name}(value)) => acc.filter(_.${col.name} == value)" @@ -462,6 +525,22 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef | map.put(${unsavedParam.name}.${id.paramName}, ${unsavedParam.name}): @${TypesScala.nowarn} | ${unsavedParam.name} |}""".stripMargin + case RepoMethod.UpsertStreaming(_, _, _, _) => + code"""|unsaved.compile.toList.map { rows => + | var num = 0 + | rows.foreach { row => + | map += (row.${id.paramName} -> row) + | num += 1 + | } + | num + |}""".stripMargin + case RepoMethod.UpsertBatch(_, _, _, _) => + code"""|$fs2Stream.emits { + | unsaved.map { row => + | map += (row.${id.paramName} -> row) + | row + | } + |}""".stripMargin case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, _) => 
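On the doobie side, `upsertBatch` is expressed directly with `doobie.util.update.Update` and `updateManyWithGeneratedKeys`, which streams back the rows produced by the `returning` clause, while `upsertStreaming` reuses the `FragmentOps.copyIn` machinery together with the same temp-table merge. A small usage sketch, for illustration only (the repo and row names are assumptions, not part of this patch):

    // Sketch only: calling the two new doobie methods from application code.
    val repo = new PersonRepoImpl
    val rows: List[PersonRow] = ???
    val upserted: doobie.ConnectionIO[List[PersonRow]] =
      repo.upsertBatch(rows).compile.toList
    val mergedCount: doobie.ConnectionIO[Int] =
      repo.upsertStreaming(fs2.Stream.emits(rows).covary[doobie.ConnectionIO], batchSize = 10000)
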
code"insert(${maybeToRow.get.name}(${unsavedParam.name}))" case RepoMethod.InsertStreaming(_, _, _) => @@ -715,9 +794,60 @@ class DbLibDoobie(pkg: sc.QIdent, inlineImplicits: Boolean, default: ComputedDef sc.Given(tparams = Nil, name = readName, implicitParams = Nil, tpe = Read.of(tpe), body = body) } + + val write = { + val puts = { + val all = cols.map { c => + c.tpe match { + case TypesScala.Optional(underlying) => code"(${lookupPutFor(underlying)}, $Nullability.Nullable)" + case other => code"(${lookupPutFor(other)}, $Nullability.NoNulls)" + } + } + code"${TypesScala.List}(${all.mkCode(",\n")})" + } + + val toList = { + val all = cols.map(c => code"x.${c.name}") + code"x => ${TypesScala.List}(${all.mkCode(", ")})" + } + val unsafeSet = { + val all = cols.zipWithIndex.map { case (c, i) => + c.tpe match { + case TypesScala.Optional(underlying) => code"${lookupPutFor(underlying)}.unsafeSetNullable(rs, i + $i, a.${c.name})" + case other => code"${lookupPutFor(other)}.unsafeSetNonNullable(rs, i + $i, a.${c.name})" + } + } + code"""|(rs, i, a) => { + | ${all.mkCode("\n")} + |}""".stripMargin + } + + val unsafeUpdate = { + val all = cols.zipWithIndex.map { case (c, i) => + c.tpe match { + case TypesScala.Optional(underlying) => code"${lookupPutFor(underlying)}.unsafeUpdateNullable(ps, i + $i, a.${c.name})" + case other => code"${lookupPutFor(other)}.unsafeUpdateNonNullable(ps, i + $i, a.${c.name})" + } + } + code"""|(ps, i, a) => { + | ${all.mkCode("\n")} + |}""".stripMargin + } + + val body = + code"""|new ${Write.of(tpe)}( + | puts = $puts, + | toList = $toList, + | unsafeSet = $unsafeSet, + | unsafeUpdate = $unsafeUpdate + |) + |""".stripMargin + + sc.Given(tparams = Nil, name = writeName, implicitParams = Nil, tpe = Write.of(tpe), body = body) + } rowType match { case DbLib.RowType.Writable => text.toList - case DbLib.RowType.ReadWriteable => List(read) ++ text + case DbLib.RowType.ReadWriteable => List(read, write) ++ text case DbLib.RowType.Readable => List(read) } } diff --git a/typo/src/scala/typo/internal/codegen/DbLibZioJdbc.scala b/typo/src/scala/typo/internal/codegen/DbLibZioJdbc.scala index 1f9594d729..3fc4a1d14a 100644 --- a/typo/src/scala/typo/internal/codegen/DbLibZioJdbc.scala +++ b/typo/src/scala/typo/internal/codegen/DbLibZioJdbc.scala @@ -178,69 +178,78 @@ class DbLibZioJdbc(pkg: sc.QIdent, inlineImplicits: Boolean, dslEnabled: Boolean code"${composite.cols.map(cc => code"${cc.dbName} = ${runtimeInterpolateValue(code"${composite.paramName}.${cc.name}", cc.tpe)}").mkCode(" AND ")}" } - override def repoSig(repoMethod: RepoMethod): sc.Code = { + override def repoSig(repoMethod: RepoMethod): Either[DbLib.NotImplementedFor, sc.Code] = { val name = repoMethod.methodName repoMethod match { case RepoMethod.SelectBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.SelectBuilder.of(fieldsType, rowType)}") case RepoMethod.SelectAll(_, _, rowType) => - code"def $name: ${ZStream.of(ZConnection, Throwable, rowType)}" + Right(code"def $name: ${ZStream.of(ZConnection, Throwable, rowType)}") case RepoMethod.SelectById(_, _, id, rowType) => - code"def $name(${id.param}): ${ZIO.of(ZConnection, Throwable, TypesScala.Option.of(rowType))}" + Right(code"def $name(${id.param}): ${ZIO.of(ZConnection, Throwable, TypesScala.Option.of(rowType))}") case RepoMethod.SelectByIds(_, _, idComputed, idsParam, rowType) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => 
sc.Param(sc.Ident(s"encoder$i"), JdbcEncoder.of(sc.Type.ArrayOf(colType)), None) } usedDefineds match { case Nil => - code"def $name($idsParam): ${ZStream.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($idsParam): ${ZStream.of(ZConnection, Throwable, rowType)}") case nonEmpty => - code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ZStream.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($idsParam)(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ZStream.of(ZConnection, Throwable, rowType)}") } case RepoMethod.SelectByIdsTracked(x) => val usedDefineds = x.idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"encoder$i"), JdbcEncoder.of(sc.Type.ArrayOf(colType)), None) } val returnType = ZIO.of(ZConnection, Throwable, TypesScala.Map.of(x.idComputed.tpe, x.rowType)) usedDefineds match { case Nil => - code"def $name(${x.idsParam}): $returnType" + Right(code"def $name(${x.idsParam}): $returnType") case nonEmpty => - code"def $name(${x.idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): $returnType" + Right(code"def $name(${x.idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): $returnType") } case RepoMethod.SelectByUnique(_, keyColumns, _, rowType) => - code"def $name(${keyColumns.map(_.param.code).mkCode(", ")}): ${ZIO.of(ZConnection, Throwable, TypesScala.Option.of(rowType))}" + Right(code"def $name(${keyColumns.map(_.param.code).mkCode(", ")}): ${ZIO.of(ZConnection, Throwable, TypesScala.Option.of(rowType))}") case RepoMethod.SelectByFieldValues(_, _, _, fieldValueOrIdsParam, rowType) => - code"def $name($fieldValueOrIdsParam): ${ZStream.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($fieldValueOrIdsParam): ${ZStream.of(ZConnection, Throwable, rowType)}") case RepoMethod.UpdateBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.UpdateBuilder.of(fieldsType, rowType)}") case RepoMethod.UpdateFieldValues(_, id, varargs, _, _, _) => - code"def $name(${id.param}, $varargs): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}" + Right(code"def $name(${id.param}, $varargs): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}") case RepoMethod.Update(_, _, _, param, _) => - code"def $name($param): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}" + Right(code"def $name($param): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}") case RepoMethod.Insert(_, _, unsavedParam, rowType) => - code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, rowType)}") case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, rowType) => - code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, rowType)}" + Right(code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, rowType)}") case RepoMethod.InsertStreaming(_, _, rowType) => val in = ZStream.of(ZConnection, TypesJava.Throwable, rowType) val out = ZIO.of(ZConnection, TypesJava.Throwable, TypesScala.Long) - code"def $name(unsaved: $in, batchSize: Int = 10000): $out" + Right(code"def $name(unsaved: $in, batchSize: Int = 10000): $out") + case RepoMethod.UpsertBatch(_, _, _, _) => +// val in = TypesScala.List.of(rowType) +// val out = ZIO.of(ZConnection, TypesJava.Throwable, TypesScala.List.of(rowType)) +// Right(code"def $name(unsaved: $in): $out") + Left(DbLib.NotImplementedFor("zio-jdbc")) + case RepoMethod.UpsertStreaming(_, _, _, rowType) => 
+ val in = ZStream.of(ZConnection, TypesJava.Throwable, rowType) + val out = ZIO.of(ZConnection, TypesJava.Throwable, TypesScala.Long) + Right(code"def $name(unsaved: $in, batchSize: Int = 10000): $out") case RepoMethod.Upsert(_, _, _, unsavedParam, rowType) => - code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, UpdateResult.of(rowType))}" + Right(code"def $name($unsavedParam): ${ZIO.of(ZConnection, Throwable, UpdateResult.of(rowType))}") case RepoMethod.InsertUnsavedStreaming(_, unsaved) => val in = ZStream.of(ZConnection, TypesJava.Throwable, unsaved.tpe) val out = ZIO.of(ZConnection, TypesJava.Throwable, TypesScala.Long) - code"def $name(unsaved: $in, batchSize: ${TypesScala.Int} = 10000): $out" + Right(code"def $name(unsaved: $in, batchSize: ${TypesScala.Int} = 10000): $out") case RepoMethod.DeleteBuilder(_, fieldsType, rowType) => - code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}" + Right(code"def $name: ${sc.Type.dsl.DeleteBuilder.of(fieldsType, rowType)}") case RepoMethod.Delete(_, id) => - code"def $name(${id.param}): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}" + Right(code"def $name(${id.param}): ${ZIO.of(ZConnection, Throwable, TypesScala.Boolean)}") case RepoMethod.DeleteByIds(_, idComputed, idsParam) => val usedDefineds = idComputed.userDefinedColTypes.zipWithIndex.map { case (colType, i) => sc.Param(sc.Ident(s"encoder$i"), JdbcEncoder.of(sc.Type.ArrayOf(colType)), None) } usedDefineds match { case Nil => - code"def $name(${idsParam}): ${ZIO.of(ZConnection, Throwable, TypesScala.Long)}" + Right(code"def $name(${idsParam}): ${ZIO.of(ZConnection, Throwable, TypesScala.Long)}") case nonEmpty => - code"def $name(${idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ZIO.of(ZConnection, Throwable, TypesScala.Long)}" + Right(code"def $name(${idsParam})(implicit ${nonEmpty.map(_.code).mkCode(", ")}): ${ZIO.of(ZConnection, Throwable, TypesScala.Long)}") } case RepoMethod.SqlFile(sqlScript) => val params = sc.Params(sqlScript.params.map(p => sc.Param(p.name, p.tpe, None))) @@ -250,7 +259,7 @@ class DbLibZioJdbc(pkg: sc.QIdent, inlineImplicits: Boolean, dslEnabled: Boolean case MaybeReturnsRows.Update => ZIO.of(ZConnection, Throwable, TypesScala.Long) } - code"def $name$params: $retType" + Right(code"def $name$params: $retType") } } @@ -394,36 +403,50 @@ class DbLibZioJdbc(pkg: sc.QIdent, inlineImplicits: Boolean, dslEnabled: Boolean code"${runtimeInterpolateValue(code"${unsavedParam.name}.${c.name}", c.tpe)}${SqlCast.toPgCode(c)}" } - val pickExcludedCols = cols.toList - .filterNot(c => id.cols.exists(_.name == c.name)) - .map { c => code"${c.dbName} = EXCLUDED.${c.dbName}" } + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin + } - val base: sc.Code = + val sql = SQL { code"""|insert into $relName(${dbNames(cols, isRead = false)}) |values ( | ${values.mkCode(",\n")} |) - |on conflict (${dbNames(id.cols, isRead = false)})""".stripMargin - - val exclusion: Option[sc.Code] = - if (pickExcludedCols.isEmpty) None - else - Some { - code"""|do update set - | ${pickExcludedCols.mkCode(",\n")}""".stripMargin - } + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |returning ${dbNames(cols, isRead = true)}""".stripMargin + } - val returning: sc.Code = code"returning ${dbNames(cols, isRead = true)}" + 
code"$sql.insertReturning(using ${lookupJdbcDecoder(rowType)})" - val sql = SQL { - List( - Some(base), - exclusion, - Some(returning) - ).flatten.mkCode("\n") + case RepoMethod.UpsertBatch(_, _, _, _) => + "???" + case RepoMethod.UpsertStreaming(relName, cols, id, rowType) => + val conflictAction = cols.toList.filterNot(c => id.cols.exists(_.name == c.name)) match { + case Nil => code"do nothing" + case nonEmpty => + code"""|do update set + | ${nonEmpty.map { c => code"${c.dbName.code} = EXCLUDED.${c.dbName.code}" }.mkCode(",\n")}""".stripMargin } + val tempTablename = s"${relName.name}_TEMP" - code"$sql.insertReturning(using ${lookupJdbcDecoder(rowType)})" + val copySql = sc.s(code"copy $tempTablename(${dbNames(cols, isRead = false)}) from stdin") + + val mergeSql = SQL { + code"""|insert into $relName(${dbNames(cols, isRead = false)}) + |select * from $tempTablename + |on conflict (${dbNames(id.cols, isRead = false)}) + |$conflictAction + |; + |drop table $tempTablename;""".stripMargin + } + code"""|val created = ${SQL(code"create temporary table $tempTablename (like $relName) on commit drop")}.execute + |val copied = ${textSupport.get.streamingInsert}($copySql, batchSize, unsaved)(${textSupport.get.lookupTextFor(rowType)}) + |val merged = $mergeSql.update + |created *> copied *> merged""".stripMargin case RepoMethod.Insert(relName, cols, unsavedParam, rowType) => val values = cols.map { c => @@ -579,6 +602,20 @@ class DbLibZioJdbc(pkg: sc.QIdent, inlineImplicits: Boolean, dslEnabled: Boolean | map.put(${unsavedParam.name}.${id.paramName}, ${unsavedParam.name}): @${TypesScala.nowarn} | $UpdateResult(1, $Chunk.single(${unsavedParam.name})) |}""".stripMargin + case RepoMethod.UpsertBatch(_, _, id, _) => + code"""|ZIO.succeed { + | unsaved.map{ row => + | map += (row.${id.paramName} -> row) + | row + | } + |}""".stripMargin + case RepoMethod.UpsertStreaming(_, _, _, _) => + code"""|unsaved.scanZIO(0L) { case (acc, row) => + | ZIO.succeed { + | map += (row.${id.paramName} -> row) + | acc + 1 + | } + |}.runLast.map(_.getOrElse(0L))""".stripMargin case RepoMethod.InsertUnsaved(_, _, _, unsavedParam, _, _) => code"insert(${maybeToRow.get.name}(${unsavedParam.name}))" diff --git a/typo/src/scala/typo/internal/codegen/FilesRelation.scala b/typo/src/scala/typo/internal/codegen/FilesRelation.scala index 4bf3acc721..55e571ae4d 100644 --- a/typo/src/scala/typo/internal/codegen/FilesRelation.scala +++ b/typo/src/scala/typo/internal/codegen/FilesRelation.scala @@ -297,7 +297,10 @@ case class FilesRelation( def RepoTraitFile(dbLib: DbLib, repoMethods: NonEmptyList[RepoMethod]): sc.File = { val renderedMethods = repoMethods.map { repoMethod => - code"${repoMethod.comment.fold("")(c => c + "\n")}${dbLib.repoSig(repoMethod)}" + dbLib.repoSig(repoMethod) match { + case Left(DbLib.NotImplementedFor(lib)) => code"// Not implementable for $lib: ${repoMethod.methodName}" + case Right(sig) => code"${repoMethod.comment.fold("")(c => c + "\n")}$sig" + } } val str = code"""trait ${names.RepoName.name} { @@ -309,10 +312,12 @@ case class FilesRelation( } def RepoImplFile(dbLib: DbLib, repoMethods: NonEmptyList[RepoMethod]): sc.File = { - val renderedMethods: NonEmptyList[sc.Code] = repoMethods.map { repoMethod => - code"""|${repoMethod.comment.fold("")(c => c + "\n")}override ${dbLib.repoSig(repoMethod)} = { - | ${dbLib.repoImpl(repoMethod)} - |}""".stripMargin + val renderedMethods: List[sc.Code] = repoMethods.toList.flatMap { repoMethod => + dbLib.repoSig(repoMethod).toOption.map { sig => + 
code"""|${repoMethod.comment.fold("")(c => c + "\n")}override $sig = { + | ${dbLib.repoImpl(repoMethod)} + |}""".stripMargin + } } val str = code"""|class ${names.RepoImplName.name} extends ${names.RepoName} { @@ -329,11 +334,13 @@ case class FilesRelation( sc.Param(sc.Ident("toRow"), TypesScala.Function1.of(unsaved.tpe, names.RowName), None) } - val methods: NonEmptyList[sc.Code] = - repoMethods.map { repoMethod => - code"""|${repoMethod.comment.fold("")(c => c + "\n")}override ${dbLib.repoSig(repoMethod)} = { - | ${dbLib.mockRepoImpl(idComputed, repoMethod, maybeToRowParam)} - |}""".stripMargin + val methods: List[sc.Code] = + repoMethods.toList.flatMap { repoMethod => + dbLib.repoSig(repoMethod).toOption.map { sig => + code"""|${repoMethod.comment.fold("")(c => c + "\n")}override $sig = { + | ${dbLib.mockRepoImpl(idComputed, repoMethod, maybeToRowParam)} + |}""".stripMargin + } } val classParams = List( diff --git a/typo/src/scala/typo/sc.scala b/typo/src/scala/typo/sc.scala index 5463e7d5d7..0b68e08e46 100644 --- a/typo/src/scala/typo/sc.scala +++ b/typo/src/scala/typo/sc.scala @@ -140,6 +140,7 @@ object sc { TypesScala.Float, TypesScala.Function1, TypesScala.Int, + TypesScala.Iterable, TypesScala.Iterator, TypesJava.Character, TypesJava.Integer,