diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 21e7429d..8ad7fb5e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,12 +8,7 @@ on: jobs: test: - strategy: - matrix: - java: [ '11' ] - - name: test with java ${{ matrix.java }} - + name: test runs-on: ubuntu-latest steps: @@ -22,7 +17,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v3 with: - java-version: ${{ matrix.java }} + java-version: '17' distribution: 'temurin' - name: Run Gradle diff --git a/libpretixsync/build-postgres.gradle b/libpretixsync/build-postgres.gradle index ba234617..42bd4312 100644 --- a/libpretixsync/build-postgres.gradle +++ b/libpretixsync/build-postgres.gradle @@ -6,9 +6,10 @@ plugins { id 'jacoco' id 'org.jetbrains.kotlin.jvm' id 'org.jetbrains.kotlin.kapt' - id 'com.google.protobuf' version '0.9.4' + id 'com.google.protobuf' id 'com.github.gmazzo.buildconfig' version "5.3.5" id 'org.gradle.idea' + id 'app.cash.sqldelight' } apply from: 'versions.gradle' @@ -107,6 +108,8 @@ dependencies { implementation "net.i2p.crypto:eddsa:$eddsa_version" implementation "com.google.protobuf:protobuf-javalite:$protobuf_version" + implementation "app.cash.sqldelight:jdbc-driver:$sqldelight_version" + kapt "io.requery:requery-processor:$requery_version" annotationProcessor "javax.annotation:jsr250-api:$jsr250_version" @@ -121,3 +124,13 @@ task copyTestResources(type: Copy) { into "${buildDir}/classes/test" } processTestResources.dependsOn copyTestResources + +sqldelight { + databases { + SyncDatabase { + packageName = "eu.pretix.libpretixsync.sqldelight" + dialect "app.cash.sqldelight:postgresql-dialect:$sqldelight_version" + srcDirs('src/main/sqldelight/postgres', 'src/main/sqldelight/common', 'src/main/sqldelight/migrations') + } + } +} diff --git a/libpretixsync/build.gradle b/libpretixsync/build.gradle index ef51c120..e35fefab 100644 --- a/libpretixsync/build.gradle +++ b/libpretixsync/build.gradle @@ -4,9 +4,10 @@ plugins { id 'jacoco' id 
'org.jetbrains.kotlin.jvm' id 'org.jetbrains.kotlin.kapt' - id 'com.google.protobuf' version '0.9.4' + id 'com.google.protobuf' id 'com.github.gmazzo.buildconfig' version '5.3.5' id 'org.gradle.idea' + id 'app.cash.sqldelight' } apply from: 'versions.gradle' @@ -104,6 +105,8 @@ dependencies { implementation "net.i2p.crypto:eddsa:$eddsa_version" implementation "com.google.protobuf:protobuf-javalite:$protobuf_version" + implementation "app.cash.sqldelight:sqlite-driver:$sqldelight_version" + kapt "io.requery:requery-processor:$requery_version" annotationProcessor "javax.annotation:jsr250-api:$jsr250_version" @@ -118,3 +121,17 @@ task copyTestResources(type: Copy) { into "${buildDir}/classes/test" } processTestResources.dependsOn copyTestResources + +sqldelight { + databases { + SyncDatabase { + packageName = "eu.pretix.libpretixsync.sqldelight" + srcDirs('src/main/sqldelight/sqlite', 'src/main/sqldelight/common', 'src/main/sqldelight/migrations') + + // Oldest dialect supported by SQLDelight 2.0.2 + // In Android projects, it will auto-select based on SDK + // but not go lower than 3.18 (Android 9 is still on 3.8) + dialect "app.cash.sqldelight:sqlite-3-18-dialect:$sqldelight_version" + } + } +} diff --git a/libpretixsync/settings.gradle b/libpretixsync/settings.gradle index fdd5d5d6..6f100bad 100644 --- a/libpretixsync/settings.gradle +++ b/libpretixsync/settings.gradle @@ -12,7 +12,8 @@ pluginManagement { // but building it inside the multi module project doesn't exit with "unknown version already on classpath" error plugins { id "org.jetbrains.kotlin.jvm" version "1.9.23" apply false + id "com.google.protobuf" version "0.9.4" apply false + id "app.cash.sqldelight" version "2.0.2" apply false } rootProject.name = 'eu.pretix.libpretixsync' - diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/api/PretixApi.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/api/PretixApi.kt index ded37cb3..de7b221f 100644 --- 
a/libpretixsync/src/main/java/eu/pretix/libpretixsync/api/PretixApi.kt +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/api/PretixApi.kt @@ -3,9 +3,9 @@ package eu.pretix.libpretixsync.api import eu.pretix.libpretixsync.DummySentryImplementation import eu.pretix.libpretixsync.SentryInterface import eu.pretix.libpretixsync.config.ConfigStore +import eu.pretix.libpretixsync.db.AbstractQueuedCheckIn import eu.pretix.libpretixsync.db.Answer -import eu.pretix.libpretixsync.db.Question -import eu.pretix.libpretixsync.db.QueuedCheckIn +import eu.pretix.libpretixsync.models.Question import eu.pretix.libpretixsync.utils.NetUtils import eu.pretix.libpretixsync.utils.URLFragmentEncoder import okhttp3.MediaType @@ -46,7 +46,7 @@ open class PretixApi(url: String, key: String, orgaSlug: String, version: Int, h fun redeem(eventSlug: String, secret: String, datetime: Date?, force: Boolean, nonce: String?, answers: List?, listId: Long, ignore_unpaid: Boolean, pdf_data: Boolean, type: String?, source_type: String?, callTimeout: Long? = null, questions_supported: Boolean = true): ApiResponse { var dt: String? = null if (datetime != null) { - dt = QueuedCheckIn.formatDatetime(datetime) + dt = AbstractQueuedCheckIn.formatDatetime(datetime) } return redeem(eventSlug, secret, dt, force, nonce, answers, listId, ignore_unpaid, pdf_data, type, source_type, callTimeout, questions_supported) } @@ -73,9 +73,9 @@ open class PretixApi(url: String, key: String, orgaSlug: String, version: Int, h "pdf" -> "application/pdf".toMediaTypeOrNull()!! else -> "application/unknown".toMediaTypeOrNull()!! 
}, a.value.split("/").last()) - answerbody.put("" + (a.question as Question).getServer_id(), fileid) + answerbody.put("" + (a.question as Question).serverId, fileid) } else { - answerbody.put("" + (a.question as Question).getServer_id(), a.value) + answerbody.put("" + (a.question as Question).serverId, a.value) } } } @@ -93,7 +93,7 @@ open class PretixApi(url: String, key: String, orgaSlug: String, version: Int, h fun redeem(lists: List, secret: String, datetime: Date?, force: Boolean, nonce: String?, answers: List?, ignore_unpaid: Boolean, pdf_data: Boolean, type: String?, source_type: String?, callTimeout: Long? = null, questions_supported: Boolean = true): ApiResponse { var dt: String? = null if (datetime != null) { - dt = QueuedCheckIn.formatDatetime(datetime) + dt = AbstractQueuedCheckIn.formatDatetime(datetime) } return redeem(lists, secret, dt, force, nonce, answers, ignore_unpaid, pdf_data, type, source_type, callTimeout, questions_supported) } @@ -120,9 +120,9 @@ open class PretixApi(url: String, key: String, orgaSlug: String, version: Int, h "pdf" -> "application/pdf".toMediaTypeOrNull()!! else -> "application/unknown".toMediaTypeOrNull()!! }, a.value.split("/").last()) - answerbody.put("" + (a.question as Question).getServer_id(), fileid) + answerbody.put("" + (a.question as Question).serverId, fileid) } else { - answerbody.put("" + (a.question as Question).getServer_id(), a.value) + answerbody.put("" + (a.question as Question).serverId, a.value) } } } @@ -314,7 +314,7 @@ open class PretixApi(url: String, key: String, orgaSlug: String, version: Int, h } @Throws(ApiException::class) - open fun downloadFile(full_url: String): ApiResponse? 
{ + open fun downloadFile(full_url: String): ApiResponse { var request = Request.Builder() .url(full_url) .header("Authorization", "Device $key") diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/AsyncCheckProvider.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/AsyncCheckProvider.kt index ff1f993f..a2dc317d 100644 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/AsyncCheckProvider.kt +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/AsyncCheckProvider.kt @@ -7,8 +7,18 @@ import eu.pretix.libpretixsync.config.ConfigStore import eu.pretix.libpretixsync.crypto.isValidSignature import eu.pretix.libpretixsync.crypto.readPubkeyFromPem import eu.pretix.libpretixsync.crypto.sig1.TicketProtos -import eu.pretix.libpretixsync.db.* -import eu.pretix.libpretixsync.db.Order +import eu.pretix.libpretixsync.db.AbstractQueuedCheckIn +import eu.pretix.libpretixsync.db.Answer +import eu.pretix.libpretixsync.db.NonceGenerator +import eu.pretix.libpretixsync.db.QuestionLike +import eu.pretix.libpretixsync.models.CheckIn +import eu.pretix.libpretixsync.models.Event +import eu.pretix.libpretixsync.models.Order as OrderModel +import eu.pretix.libpretixsync.models.OrderPosition as OrderPositionModel +import eu.pretix.libpretixsync.models.Question +import eu.pretix.libpretixsync.models.QueuedCheckIn +import eu.pretix.libpretixsync.models.db.toModel +import eu.pretix.libpretixsync.sqldelight.SyncDatabase import eu.pretix.libpretixsync.utils.cleanInput import eu.pretix.libpretixsync.utils.codec.binary.Base64 import eu.pretix.libpretixsync.utils.codec.binary.Base64.decodeBase64 @@ -16,7 +26,6 @@ import eu.pretix.libpretixsync.utils.logic.JsonLogic import eu.pretix.libpretixsync.utils.logic.truthy import io.requery.BlockingEntityStore import io.requery.Persistable -import io.requery.kotlin.Logical import io.requery.query.* import org.joda.time.DateTime import org.joda.time.DateTimeZone @@ -28,9 +37,11 @@ import 
org.json.JSONException import org.json.JSONObject import java.lang.Exception import java.nio.charset.Charset +import java.time.Instant +import java.time.OffsetDateTime import java.util.* -class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: BlockingEntityStore) : TicketCheckProvider { +class AsyncCheckProvider(private val config: ConfigStore, private val db: SyncDatabase) : TicketCheckProvider { private var sentry: SentryInterface = DummySentryImplementation() /* @@ -55,7 +66,7 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: val dt = now() val jdoc = JSONObject() - jdoc.put("datetime", QueuedCheckIn.formatDatetime(dt.toDate())) + jdoc.put("datetime", AbstractQueuedCheckIn.formatDatetime(dt.toDate())) if (raw_barcode.contains(Regex("[\\p{C}]"))) { jdoc.put("raw_barcode", "binary:" + Base64.encodeBase64(raw_barcode.toByteArray(Charset.defaultCharset())).toString(Charset.defaultCharset())) } else { @@ -72,12 +83,12 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: if (variation != null && variation > 0) jdoc.put("variation", variation) if (subevent != null && subevent > 0) jdoc.put("subevent", subevent) - val qo = QueuedCall() val api = PretixApi.fromConfig(config) // todo: uses wrong http client - qo.setUrl(api.eventResourceUrl(eventSlug, "checkinlists") + listId + "/failed_checkins/") - qo.setBody(jdoc.toString()) - qo.setIdempotency_key(NonceGenerator.nextNonce()) - dataStore.insert(qo) + db.queuedCallQueries.insert( + body = jdoc.toString(), + idempotency_key = NonceGenerator.nextNonce(), + url = api.eventResourceUrl(eventSlug, "checkinlists") + listId + "/failed_checkins/", + ) } private fun initJsonLogic(event: Event, subeventId: Long, tz: DateTimeZone): JsonLogic { @@ -137,13 +148,19 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: } jsonLogic.addOperation("buildTime") { l, d -> val t = l?.getOrNull(0) - var evjson = event.json - 
if (subeventId != 0L) { - val subevent = dataStore.select(SubEvent::class.java) - .where(SubEvent.EVENT_SLUG.eq(event.slug)) - .and(SubEvent.SERVER_ID.eq(subeventId)) - .get().first() - evjson = subevent.json + + // Re-fetch event/sub-event to get raw JSON and use date values from that + // Should be less risky than converting back and forth between java.time and Joda + val evjson = if (subeventId != 0L) { + val jsonData = db.subEventQueries.selectByServerIdAndSlug( + server_id = subeventId, + event_slug = event.slug, + ).executeAsOne().json_data + + JSONObject(jsonData) + } else { + val jsonData = db.eventQueries.selectById(event.id).executeAsOne().json_data + JSONObject(jsonData) } if (t == "custom") { ISODateTimeFormat.dateTimeParser().parseDateTime(l.getOrNull(1) as String?) @@ -223,41 +240,43 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: var askQuestions = false for (q in questions) { - if (!q.isAskDuringCheckin && !q.isShowDuringCheckin) { + val questionJson = db.questionQueries.selectByServerId(q.serverId).executeAsOne().json_data!! + + if (!q.askDuringCheckIn && !q.showDuringCheckIn) { continue } var answer: String? 
= "" - if (answerMap.containsKey(q.getServer_id())) { - answer = answerMap[q.getServer_id()] + if (answerMap.containsKey(q.serverId)) { + answer = answerMap[q.serverId] try { answer = q.clean_answer(answer, q.options, false) val jo = JSONObject() jo.put("answer", answer) - jo.put("question", q.getServer_id()) - if (q.isAskDuringCheckin) { + jo.put("question", q.serverId) + if (q.askDuringCheckIn) { givenAnswers.put(jo) } - if (q.isShowDuringCheckin) { - shownAnswers.add(TicketCheckProvider.QuestionAnswer(q, answer)) + if (q.showDuringCheckIn) { + shownAnswers.add(TicketCheckProvider.QuestionAnswer(q, questionJson, answer)) } } catch (e: QuestionLike.ValidationException) { answer = "" - if (q.isAskDuringCheckin) { + if (q.askDuringCheckIn) { askQuestions = true } } catch (e: JSONException) { answer = "" - if (q.isAskDuringCheckin) { + if (q.askDuringCheckIn) { askQuestions = true } } } else { - if (q.isAskDuringCheckin) { + if (q.askDuringCheckIn) { askQuestions = true } } - if (q.isAskDuringCheckin) { - requiredAnswers.add(TicketCheckProvider.QuestionAnswer(q, answer)) + if (q.askDuringCheckIn) { + requiredAnswers.add(TicketCheckProvider.QuestionAnswer(q, questionJson, answer)) } } @@ -266,9 +285,9 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: private fun checkOfflineWithoutData(eventsAndCheckinLists: Map, ticketid: String, type: TicketCheckProvider.CheckInType, answers: List?, nonce: String?, allowQuestions: Boolean): TicketCheckProvider.CheckResult { val dt = now() - val events = dataStore.select(Event::class.java) - .where(Event.SLUG.`in`(eventsAndCheckinLists.keys.toList())) - .get().toList() + val events = db.eventQueries.selectBySlugList(eventsAndCheckinLists.keys.toList()) + .executeAsList() + .map { it.toModel() } var decoded: SignedTicketData? = null var event: Event? 
= null for (e in events) { @@ -283,24 +302,19 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: } val listId = eventsAndCheckinLists[event.slug] ?: return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, "Check-in list not set for event", offline = true) val eventSlug = event.slug - val list = dataStore.select(CheckInList::class.java) - .where(CheckInList.SERVER_ID.eq(listId)) - .and(CheckInList.EVENT_SLUG.eq(eventSlug)) - .get().firstOrNull() - ?: return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, "Check-in list not found", offline = true) - - val is_revoked = dataStore.count(RevokedTicketSecret::class.java) - .where(RevokedTicketSecret.SECRET.eq(ticketid)) - .get().value() + val list = db.checkInListQueries.selectByServerIdAndEventSlug( + server_id = listId, + event_slug = eventSlug, + ).executeAsOneOrNull()?.toModel() + ?: return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, "Check-in list not found", offline = true) + + val is_revoked = db.revokedTicketSecretQueries.countForSecret(ticketid).executeAsOne() if (is_revoked > 0) { storeFailedCheckin(eventSlug, listId, "revoked", ticketid, type, nonce = nonce) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.REVOKED, offline = true) } - val is_blocked = dataStore.count(BlockedTicketSecret::class.java) - .where(BlockedTicketSecret.SECRET.eq(ticketid)) - .and(BlockedTicketSecret.BLOCKED.eq(true)) - .get().value() + val is_blocked = db.blockedTicketSecretQueries.countBlockedForSecret(ticketid).executeAsOne() if (is_blocked > 0) { storeFailedCheckin(eventSlug, listId, "blocked", ticketid, type, nonce = nonce) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.BLOCKED, offline = true) @@ -317,27 +331,26 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: } } - if (!list.all_items) { - val is_in_list = 
dataStore.count(CheckInList_Item::class.java) - .leftJoin(Item::class.java).on(CheckInList_Item.ITEM_ID.eq(Item.ID)) - .where(Item.SERVER_ID.eq(decoded.item)) - .and(CheckInList_Item.CHECK_IN_LIST_ID.eq(list.getId())) - .get().value() - if (is_in_list == 0) { + if (!list.allItems) { + val is_in_list = db.checkInListQueries.checkIfItemIsInList( + checkin_list_id = list.id, + item_id = decoded.item, + ).executeAsOne() + if (is_in_list == 0L) { storeFailedCheckin(eventSlug, listId, "product", ticketid, type, subevent = decoded.subevent, nonce = nonce) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.PRODUCT, offline = true) } } - if (list.getSubevent_id() != null && list.getSubevent_id() > 0 && list.getSubevent_id() != decoded.subevent) { + if (list.subEventId != null && list.subEventId > 0 && list.subEventId != decoded.subevent) { storeFailedCheckin(eventSlug, listId, "invalid", ticketid, type, nonce = nonce) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.INVALID, offline = true) } - val item = dataStore.select(Item::class.java) - .where(Item.SERVER_ID.eq(decoded.item)) - .and(Item.EVENT_SLUG.eq(eventSlug)) - .get().firstOrNull() + val item = db.itemQueries.selectByServerIdAndEventSlug( + server_id = decoded.item, + event_slug = eventSlug, + ).executeAsOneOrNull()?.toModel() if (item == null) { storeFailedCheckin(eventSlug, listId, "product", ticketid, type, subevent = decoded.subevent, nonce = nonce) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, "Item not found", offline = true) @@ -349,7 +362,7 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: res.ticket = item.internalName val variation = if (decoded.variation != null && decoded.variation!! > 0L) { try { - item.getVariation(decoded.variation) ?: null + item.getVariation(decoded.variation!!) 
} catch (e: JSONException) { sentry.captureException(e) null @@ -358,27 +371,22 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: if (variation != null) { res.variation = variation.stringValue } - var require_attention = false - try { - require_attention = item.json.optBoolean("checkin_attention", false) - } catch (e: JSONException) { - sentry.captureException(e) - } + val require_attention = item.checkInAttention res.isRequireAttention = require_attention || (variation?.isCheckin_attention == true) - res.checkinTexts = listOfNotNull(variation?.checkin_text?.trim(), item.checkin_text?.trim()).filterNot { it.isBlank() }.filterNot { it.isBlank() || it == "null" } + res.checkinTexts = listOfNotNull(variation?.checkin_text?.trim(), item.checkInText?.trim()).filterNot { it.isBlank() }.filterNot { it.isBlank() || it == "null" } - val queuedCheckIns = dataStore.select(QueuedCheckIn::class.java) - .where(QueuedCheckIn.SECRET.eq(ticketid)) - .get().toList().filter { - it.getCheckinListId() == listId - }.sortedWith(compareBy({ it.fullDatetime }, { it.id })) + val queuedCheckIns = db.queuedCheckInQueries.selectBySecret(ticketid) + .executeAsList() + .filter { it.checkinListId == listId } + .map { it.toModel() } + .sortedWith(compareBy({ it.dateTime }, { it.id })) val rules = list.rules if (type == TicketCheckProvider.CheckInType.ENTRY && rules != null && rules.length() > 0) { val data = mutableMapOf() - val tz = DateTimeZone.forID(event.getTimezone()) + val tz = DateTimeZone.forID(event.timezone.toString()) val jsonLogic = initJsonLogic(event, decoded.subevent ?: 0, tz) - data.put("product", item.getServer_id().toString()) + data.put("product", item.serverId.toString()) data.put("variation", if (decoded.variation != null && decoded.variation!! 
> 0) { decoded.variation.toString() } else { @@ -423,7 +431,7 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: } data.put("minutes_since_last_entry", minutes_since_entries.minOrNull() ?: -1) data.put("minutes_since_first_entry", minutes_since_entries.maxOrNull() ?: -1) - data.put("entry_status", if (queuedCheckIns.lastOrNull()?.getType() == "entry") { + data.put("entry_status", if (queuedCheckIns.lastOrNull()?.type == "entry") { "present" } else { "absent" @@ -465,12 +473,14 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: } } - val questions = item.questions + val questions = db.questionQueries.selectForItem(item.id) + .executeAsList() + .map { it.toModel() } val answerMap = mutableMapOf() if (answers != null) { for (a in answers) { - answerMap[(a.question as Question).getServer_id()] = a.value + answerMap[(a.question as Question).serverId] = a.value } } var givenAnswers = JSONArray() @@ -493,10 +503,10 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: } else { val entry_allowed = ( type == TicketCheckProvider.CheckInType.EXIT || - list.isAllowMultipleEntries || + list.allowMultipleEntries || queuedCheckIns.isEmpty() || queuedCheckIns.all { it.type == "exit" } || - (list.isAllowEntryAfterExit && queuedCheckIns.last().type == "exit") + (list.allowEntryAfterExit && queuedCheckIns.last().type == "exit") ) if (!entry_allowed) { res.isCheckinAllowed = false @@ -506,20 +516,18 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: } else { res.isCheckinAllowed = true res.type = TicketCheckProvider.CheckResult.Type.VALID - val qci = QueuedCheckIn() - if (nonce != null) { - qci.setNonce(nonce) - } else { - qci.generateNonce() - } - qci.setSecret(ticketid) - qci.setDatetime(dt.toDate()) - qci.setDatetime_string(QueuedCheckIn.formatDatetime(dt.toDate())) - qci.setAnswers(givenAnswers.toString()) - qci.setEvent_slug(eventSlug) - 
qci.setType(type.toString().lowercase(Locale.getDefault())) - qci.setCheckinListId(listId) - dataStore.insert(qci) + + db.queuedCheckInQueries.insert( + answers = givenAnswers.toString(), + checkinListId = listId, + datetime = dt.toDate(), + datetime_string = AbstractQueuedCheckIn.formatDatetime(dt.toDate()), + event_slug = eventSlug, + nonce = nonce ?: NonceGenerator.nextNonce(), + secret = ticketid, + source_type = null, + type = type.toString().lowercase(Locale.getDefault()), + ) } } return res @@ -545,25 +553,22 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: sentry.addBreadcrumb("provider.check", "offline check started") - val tickets = dataStore.select(OrderPosition::class.java) - .leftJoin(Order::class.java).on(Order.ID.eq(OrderPosition.ORDER_ID)) - .where(OrderPosition.SECRET.eq(ticketid_cleaned)) - .and(Order.EVENT_SLUG.`in`(eventsAndCheckinLists.keys.toList())) - .get().toList() + val tickets = db.orderPositionQueries.selectBySecretAndEventSlugs( + secret = ticketid_cleaned, + event_slugs = eventsAndCheckinLists.keys.toList(), + ).executeAsList().map { it.toModel() } + if (tickets.size == 0) { - val medium = dataStore.select(ReusableMedium::class.java) - .leftJoin(OrderPosition::class.java).on(OrderPosition.SERVER_ID.eq(ReusableMedium.LINKED_ORDERPOSITION_ID)) - .leftJoin(Order::class.java).on(Order.ID.eq(OrderPosition.ORDER_ID)) - .where(ReusableMedium.IDENTIFIER.eq(ticketid_cleaned)) - .and(ReusableMedium.TYPE.eq(source_type)) - .and(Order.EVENT_SLUG.`in`(eventsAndCheckinLists.keys.toList())) - .get().firstOrNull() + val medium = db.reusableMediumQueries.selectForCheck( + identifier = ticketid_cleaned, + type = source_type, + event_slugs = eventsAndCheckinLists.keys.toList(), + ).executeAsOneOrNull()?.toModel() if (medium != null) { - val tickets = dataStore.select(OrderPosition::class.java) - .leftJoin(Order::class.java).on(Order.ID.eq(OrderPosition.ORDER_ID)) - 
.where(OrderPosition.SERVER_ID.eq(medium.getLinked_orderposition_id())) - .and(Order.EVENT_SLUG.`in`(eventsAndCheckinLists.keys.toList())) - .get().toList() + val tickets = db.orderPositionQueries.selectByServerIdAndEventSlugs( + server_id = medium.linkedOrderPositionServerId, + event_slugs = eventsAndCheckinLists.keys.toList(), + ).executeAsList().map { it.toModel() } return checkOfflineWithData( eventsAndCheckinLists, ticketid_cleaned, @@ -585,7 +590,8 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: allowQuestions, ) } else if (tickets.size > 1) { - val eventSlug = tickets[0].getOrder().getEvent_slug() + val eventSlug = db.orderQueries.selectById(tickets[0].orderId).executeAsOne().event_slug!! + val itemServerId = db.itemQueries.selectById(tickets[0].itemId).executeAsOne().server_id storeFailedCheckin( eventSlug, eventsAndCheckinLists[eventSlug] ?: return TicketCheckProvider.CheckResult( @@ -596,10 +602,10 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: "ambiguous", ticketid_cleaned, type, - position = tickets[0].getServer_id(), - item = tickets[0].getItem().getServer_id(), - variation = tickets[0].getVariation_id(), - subevent = tickets[0].getSubevent_id(), + position = tickets[0].serverId, + item = itemServerId, + variation = tickets[0].variationServerId, + subevent = tickets[0].subEventServerId, nonce = nonce, ) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.AMBIGUOUS) @@ -607,42 +613,54 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: return checkOfflineWithData(eventsAndCheckinLists, ticketid_cleaned, tickets, answers, ignore_unpaid, type, nonce = nonce, allowQuestions = allowQuestions) } - private fun checkOfflineWithData(eventsAndCheckinLists: Map, secret: String, tickets: List, answers: List?, ignore_unpaid: Boolean, type: TicketCheckProvider.CheckInType, nonce: String?, allowQuestions: Boolean): 
TicketCheckProvider.CheckResult { + private fun checkOfflineWithData(eventsAndCheckinLists: Map, secret: String, tickets: List, answers: List?, ignore_unpaid: Boolean, type: TicketCheckProvider.CheckInType, nonce: String?, allowQuestions: Boolean): TicketCheckProvider.CheckResult { // !!! When extending this, also extend checkOfflineWithoutData !!! val dt = now() - val eventSlug = tickets[0].getOrder().getEvent_slug() - val event = dataStore.select(Event::class.java) - .where(Event.SLUG.eq(eventSlug)) - .get().firstOrNull() + + val order = db.orderQueries.selectById(tickets[0].orderId).executeAsOne().toModel() + val item = db.itemQueries.selectById(tickets[0].itemId).executeAsOne().toModel() + + val eventSlug = order.eventSlug + val event = db.eventQueries.selectBySlug(eventSlug).executeAsOneOrNull()?.toModel() + ?: return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, "Event not found", offline = true) val listId = eventsAndCheckinLists[eventSlug] ?: return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, "No check-in list selected", offline = true) - val list = dataStore.select(CheckInList::class.java) - .where(CheckInList.SERVER_ID.eq(listId)) - .and(CheckInList.EVENT_SLUG.eq(eventSlug)) - .get().firstOrNull() - ?: return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, "Check-in list not found", offline = true) + val list = db.checkInListQueries.selectByServerIdAndEventSlug( + server_id = listId, + event_slug = eventSlug, + ).executeAsOneOrNull()?.toModel() + ?: return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, "Check-in list not found", offline = true) - val position = if (list.isAddonMatch) { + val position = if (list.addonMatch) { // Add-on matching, as per spec, but only if we have data, it's impossible in data-less mode val candidates = mutableListOf(tickets[0]) - candidates.addAll(tickets[0].getOrder().getPositions().filter { - it.addonToId 
== tickets[0].getServer_id() + + val positions = db.orderPositionQueries.selectForOrder(order.id).executeAsList().map { it.toModel() } + candidates.addAll(positions.filter { + it.addonToServerId == tickets[0].serverId }) - val filteredCandidates = if (!list.all_items) { - val items = dataStore.select(CheckInList_Item.ITEM_ID) - .where(CheckInList_Item.CHECK_IN_LIST_ID.eq(list.getId())) - .get().toList().map { it.get(0) }.toHashSet() - candidates.filter { candidate -> items.contains(candidate.getItem().getId()) } + val filteredCandidates = if (!list.allItems) { + val items = db.checkInListQueries.selectItemIdsForList(list.id) + .executeAsList() + .map { + // Not-null assertion needed for SQLite + it.id!! + } + .toHashSet() + candidates.filter { candidate -> + val candidateItem = db.itemQueries.selectById(candidate.itemId).executeAsOne() + items.contains(candidateItem.id) + } } else { // This is a useless configuration that the backend won't allow, but we'll still handle // it here for completeness candidates } if (filteredCandidates.isEmpty()) { - storeFailedCheckin(eventSlug, list.getServer_id(), "product", secret, type, position = tickets[0].getServer_id(), item = tickets[0].getItem().getServer_id(), variation = tickets[0].getVariation_id(), subevent = tickets[0].getSubevent_id(), nonce = nonce) + storeFailedCheckin(eventSlug, list.serverId, "product", secret, type, position = tickets[0].serverId, item = item.serverId, variation = tickets[0].variationServerId, subevent = tickets[0].subEventServerId, nonce = nonce) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.PRODUCT, offline = true) } else if (filteredCandidates.size > 1) { - storeFailedCheckin(eventSlug, list.getServer_id(), "ambiguous", secret, type, position = tickets[0].getServer_id(), item = tickets[0].getItem().getServer_id(), variation = tickets[0].getVariation_id(), subevent = tickets[0].getSubevent_id(), nonce = nonce) + storeFailedCheckin(eventSlug, list.serverId, 
"ambiguous", secret, type, position = tickets[0].serverId, item = item.serverId, variation = tickets[0].variationServerId, subevent = tickets[0].subEventServerId, nonce = nonce) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.AMBIGUOUS, offline = true) } filteredCandidates[0] @@ -650,15 +668,18 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: tickets[0] } - val item = position.getItem() - val order = position.getOrder() + val positionItem = if (position.id == tickets[0].id) { + item + } else { + db.itemQueries.selectById(position.itemId).executeAsOne().toModel() + } val jPosition: JSONObject jPosition = try { - position.json + JSONObject(db.orderPositionQueries.selectById(position.id).executeAsOne().json_data) } catch (e: JSONException) { sentry.captureException(e) - storeFailedCheckin(eventSlug, list.getServer_id(), "error", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = position.getVariation_id(), subevent = position.getSubevent_id(), nonce = nonce) + storeFailedCheckin(eventSlug, list.serverId, "error", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, offline = true) } @@ -666,8 +687,8 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: val res = TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.ERROR, offline = true) res.scanType = type - res.ticket = position.getItem().internalName - val varid = position.variationId + res.ticket = positionItem.internalName + val varid = position.variationServerId val variation = if (varid != null) { try { item.getVariation(varid) @@ -680,88 +701,87 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: 
res.variation = variation.stringValue } - res.attendee_name = position.attendee_name + res.attendee_name = position.attendeeName res.seat = position.seatName - res.orderCode = position.getOrder().getCode() - res.positionId = position.getPositionid() + res.orderCode = order.code + res.positionId = position.positionId res.position = jPosition - res.eventSlug = list.event_slug - var require_attention = position.getOrder().isCheckin_attention + res.eventSlug = list.eventSlug + var require_attention = order.requiresCheckInAttention try { - require_attention = require_attention || item.json.optBoolean("checkin_attention", false) + require_attention = require_attention || item.checkInAttention } catch (e: JSONException) { sentry.captureException(e) } + res.isRequireAttention = require_attention || variation?.isCheckin_attention == true - res.checkinTexts = listOfNotNull(order.checkin_text?.trim(), variation?.checkin_text?.trim(), item.checkin_text?.trim()).filterNot { it.isBlank() || it == "null" } + res.checkinTexts = listOfNotNull(order.checkInText?.trim(), variation?.checkin_text?.trim(), item.checkInText?.trim()).filterNot { it.isBlank() || it == "null" } - val storedCheckIns = dataStore.select(CheckIn::class.java) - .where(CheckIn.POSITION_ID.eq(position.getId())) - .get().toList() + val storedCheckIns = db.checkInQueries.selectByPositionId(position.id).executeAsList().map { it.toModel() } val checkIns = storedCheckIns.filter { - it.getListId() == listId + it.listServerId == listId }.sortedWith(compareBy({ it.fullDatetime }, { it.id })) - if (order.getStatus() != "p" && order.getStatus() != "n") { + if (order.status != OrderModel.Status.PAID && order.status != OrderModel.Status.PENDING) { res.type = TicketCheckProvider.CheckResult.Type.CANCELED res.isCheckinAllowed = false - storeFailedCheckin(eventSlug, list.getServer_id(), "canceled", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = 
position.getVariation_id(), subevent = position.getSubevent_id(), nonce = nonce) + storeFailedCheckin(eventSlug, list.serverId, "canceled", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) return res } - if (position.isBlocked) { + if (position.blocked) { res.type = TicketCheckProvider.CheckResult.Type.BLOCKED res.isCheckinAllowed = false - storeFailedCheckin(eventSlug, list.getServer_id(), "blocked", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = position.getVariation_id(), subevent = position.getSubevent_id(), nonce = nonce) + storeFailedCheckin(eventSlug, list.serverId, "blocked", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) return res } - if (order.status != "p" && order.isRequireApproval) { + if (order.status != OrderModel.Status.PAID && order.requiresApproval) { res.type = TicketCheckProvider.CheckResult.Type.UNAPPROVED res.isCheckinAllowed = false - storeFailedCheckin(eventSlug, list.getServer_id(), "unapproved", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = position.getVariation_id(), subevent = position.getSubevent_id(), nonce = nonce) + storeFailedCheckin(eventSlug, list.serverId, "unapproved", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) return res } if (type != TicketCheckProvider.CheckInType.EXIT) { val validFrom = position.validFrom - if (validFrom != null && validFrom.isAfter(now())) { + if (validFrom != null && validFrom.isAfter(javaTimeNow())) { res.type = TicketCheckProvider.CheckResult.Type.INVALID_TIME res.isCheckinAllowed = 
false - storeFailedCheckin(eventSlug, list.getServer_id(), "invalid_time", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = position.getVariation_id(), subevent = position.getSubevent_id(), nonce = nonce) + storeFailedCheckin(eventSlug, list.serverId, "invalid_time", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) return res } val validUntil = position.validUntil - if (validUntil != null && validUntil.isBefore(now())) { + if (validUntil != null && validUntil.isBefore(javaTimeNow())) { res.type = TicketCheckProvider.CheckResult.Type.INVALID_TIME res.isCheckinAllowed = false - storeFailedCheckin(eventSlug, list.getServer_id(), "invalid_time", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = position.getVariation_id(), subevent = position.getSubevent_id(), nonce = nonce) + storeFailedCheckin(eventSlug, list.serverId, "invalid_time", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) return res } } - if (!list.all_items) { - val is_in_list = dataStore.count(CheckInList_Item::class.java) - .where(CheckInList_Item.ITEM_ID.eq(item.getId())) - .and(CheckInList_Item.CHECK_IN_LIST_ID.eq(list.getId())) - .get().value() - if (is_in_list == 0) { - storeFailedCheckin(eventSlug, list.getServer_id(), "product", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = position.getVariation_id(), subevent = position.getSubevent_id(), nonce = nonce) + if (!list.allItems) { + val is_in_list = db.checkInListQueries.checkIfItemIsInList( + checkin_list_id = list.id, + item_id = item.id, + ).executeAsOne() + if (is_in_list == 0L) { + 
storeFailedCheckin(eventSlug, list.serverId, "product", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) res.type = TicketCheckProvider.CheckResult.Type.PRODUCT res.isCheckinAllowed = false return res } } - if (list.getSubevent_id() != null && list.getSubevent_id() > 0 && list.getSubevent_id() != position.subeventId) { - storeFailedCheckin(eventSlug, list.getServer_id(), "invalid", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = position.getVariation_id(), subevent = position.getSubevent_id(), nonce = nonce) + if (list.subEventId != null && list.subEventId > 0 && list.subEventId != position.subEventServerId) { + storeFailedCheckin(eventSlug, list.subEventId, "invalid", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) return TicketCheckProvider.CheckResult(TicketCheckProvider.CheckResult.Type.INVALID, offline = true) } - if (!order.isValidStatus && !(ignore_unpaid && list.include_pending)) { + if (!order.hasValidStatus && !(ignore_unpaid && list.includePending)) { res.type = TicketCheckProvider.CheckResult.Type.UNPAID - res.isCheckinAllowed = list.include_pending && !order.isValid_if_pending - storeFailedCheckin(eventSlug, list.getServer_id(), "unpaid", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = position.getVariation_id(), subevent = position.getSubevent_id(), nonce = nonce) + res.isCheckinAllowed = list.includePending && !order.validIfPending + storeFailedCheckin(eventSlug, list.serverId, "unpaid", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) return 
res } @@ -770,50 +790,50 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: val rules = list.rules if (type == TicketCheckProvider.CheckInType.ENTRY && rules != null && rules.length() > 0) { val data = mutableMapOf() - val tz = DateTimeZone.forID(event.getTimezone()) - val jsonLogic = initJsonLogic(event, position.getSubevent_id(), tz) - data.put("product", position.getItem().getServer_id().toString()) - data.put("variation", position.getVariation_id().toString()) + val tz = DateTimeZone.forID(event.timezone.toString()) + val jsonLogic = initJsonLogic(event, position.subEventServerId!!, tz) + data.put("product", positionItem.serverId.toString()) + data.put("variation", position.variationServerId.toString()) data.put("gate", config.deviceKnownGateID.toString()) data.put("now", dt) data.put("now_isoweekday", dt.withZone(tz).dayOfWeek().get()) data.put("entries_number", checkIns.filter { it.type == "entry" }.size) data.put("entries_today", checkIns.filter { - DateTime(it.fullDatetime).withZone(tz).toLocalDate() == dt.withZone(tz).toLocalDate() && it.type == "entry" + it.fullDatetime.withZone(tz).toLocalDate() == dt.withZone(tz).toLocalDate() && it.type == "entry" }.size) data.put("entries_since", { cutoff: DateTime -> checkIns.filter { - DateTime(it.fullDatetime).withZone(tz).isAfter(cutoff.minus(Duration.millis(1))) && it.type == "entry" + it.fullDatetime.withZone(tz).isAfter(cutoff.minus(Duration.millis(1))) && it.type == "entry" }.size }) data.put("entries_days_since", { cutoff: DateTime -> checkIns.filter { - DateTime(it.fullDatetime).withZone(tz).isAfter(cutoff.minus(Duration.millis(1))) && it.type == "entry" + it.fullDatetime.withZone(tz).isAfter(cutoff.minus(Duration.millis(1))) && it.type == "entry" }.map { - DateTime(it.fullDatetime).withZone(tz).toLocalDate() + it.fullDatetime.withZone(tz).toLocalDate() }.toHashSet().size }) data.put("entries_before", { cutoff: DateTime -> checkIns.filter { - 
DateTime(it.fullDatetime).withZone(tz).isBefore(cutoff) && it.type == "entry" + it.fullDatetime.withZone(tz).isBefore(cutoff) && it.type == "entry" }.size }) data.put("entries_days_before", { cutoff: DateTime -> checkIns.filter { - DateTime(it.fullDatetime).withZone(tz).isBefore(cutoff) && it.type == "entry" + it.fullDatetime.withZone(tz).isBefore(cutoff) && it.type == "entry" }.map { - DateTime(it.fullDatetime).withZone(tz).toLocalDate() + it.fullDatetime.withZone(tz).toLocalDate() }.toHashSet().size }) data.put("entries_days", checkIns.filter { it.type == "entry" }.map { - DateTime(it.fullDatetime).withZone(tz).toLocalDate() + it.fullDatetime.withZone(tz).toLocalDate() }.toHashSet().size) val minutes_since_entries = checkIns.filter { it.type == "entry" }.map { - Duration(DateTime(it.fullDatetime).withZone(tz), dt).toStandardMinutes().minutes + Duration(it.fullDatetime.withZone(tz), dt).toStandardMinutes().minutes } data.put("minutes_since_last_entry", minutes_since_entries.minOrNull() ?: -1) data.put("minutes_since_first_entry", minutes_since_entries.maxOrNull() ?: -1) - data.put("entry_status", if (checkIns.lastOrNull()?.getType() == "entry") { + data.put("entry_status", if (checkIns.lastOrNull()?.type == "entry") { "present" } else { "absent" @@ -825,14 +845,14 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: res.isCheckinAllowed = false storeFailedCheckin( eventSlug, - list.getServer_id(), + list.serverId, "rules", - position.secret, + position.secret!!, type, - position = position.getServer_id(), - item = position.getItem().getServer_id(), - variation = position.getVariation_id(), - subevent = position.getSubevent_id(), + position = position.serverId, + item = positionItem.serverId, + variation = position.variationServerId, + subevent = position.subEventServerId, nonce = nonce ) return res @@ -843,14 +863,14 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: res.reasonExplanation = "Custom 
rule evaluation failed ($e)" storeFailedCheckin( eventSlug, - list.getServer_id(), + list.serverId, "rules", - position.secret, + position.secret!!, type, - position = position.getServer_id(), - item = position.getItem().getServer_id(), - variation = position.getVariation_id(), - subevent = position.getSubevent_id(), + position = position.serverId, + item = positionItem.serverId, + variation = position.variationServerId, + subevent = position.subEventServerId, nonce = nonce ) return res @@ -859,11 +879,14 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: // !!! When extending this, also extend checkOfflineWithoutData !!! - val questions = item.questions - val answerMap = position.answers + val questions = db.questionQueries.selectForItem(item.id) + .executeAsList() + .map { it.toModel() } + + val answerMap = position.answers?.toMutableMap() ?: mutableMapOf() if (answers != null) { for (a in answers) { - answerMap[(a.question as Question).getServer_id()] = a.value + answerMap[(a.question as Question).serverId] = a.value } } var givenAnswers = JSONArray() @@ -888,40 +911,40 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: } else { val entry_allowed = ( type == TicketCheckProvider.CheckInType.EXIT || - list.isAllowMultipleEntries || + list.allowMultipleEntries || checkIns.isEmpty() || checkIns.all { it.type == "exit" } || - (list.isAllowEntryAfterExit && checkIns.last().type == "exit") + (list.allowEntryAfterExit && checkIns.last().type == "exit") ) if (!entry_allowed) { res.isCheckinAllowed = false - res.firstScanned = checkIns.first().fullDatetime + res.firstScanned = checkIns.first().fullDatetime.toDate() res.type = TicketCheckProvider.CheckResult.Type.USED - storeFailedCheckin(eventSlug, list.getServer_id(), "already_redeemed", position.secret, type, position = position.getServer_id(), item = position.getItem().getServer_id(), variation = position.getVariation_id(), subevent = 
position.getSubevent_id(), nonce = nonce) + storeFailedCheckin(eventSlug, list.serverId, "already_redeemed", position.secret!!, type, position = position.serverId, item = positionItem.serverId, variation = position.variationServerId, subevent = position.subEventServerId, nonce = nonce) } else { res.isCheckinAllowed = true res.type = TicketCheckProvider.CheckResult.Type.VALID - val qci = QueuedCheckIn() - if (nonce != null) { - qci.setNonce(nonce) - } else { - qci.generateNonce() - } - qci.setSecret(position.secret) - qci.setDatetime(dt.toDate()) - qci.setDatetime_string(QueuedCheckIn.formatDatetime(dt.toDate())) - qci.setAnswers(givenAnswers.toString()) - qci.setEvent_slug(eventSlug) - qci.setType(type.toString().lowercase(Locale.getDefault())) - qci.setCheckinListId(listId) - dataStore.insert(qci) - val ci = CheckIn() - ci.setListId(listId) - ci.setPosition(position) - ci.setType(type.toString().lowercase(Locale.getDefault())) - ci.setDatetime(dt.toDate()) - ci.setJson_data("{\"local\": true, \"type\": \"${type.toString().lowercase(Locale.getDefault())}\", \"datetime\": \"${QueuedCheckIn.formatDatetime(dt.toDate())}\"}") - dataStore.insert(ci) + + db.queuedCheckInQueries.insert( + answers = givenAnswers.toString(), + checkinListId = listId, + datetime = dt.toDate(), + datetime_string = AbstractQueuedCheckIn.formatDatetime(dt.toDate()), + event_slug = eventSlug, + nonce = nonce ?: NonceGenerator.nextNonce(), + secret = position.secret, + source_type = null, + type = type.toString().lowercase(Locale.getDefault()), + ) + + db.checkInQueries.insert( + server_id = null, + listId = listId, + position = position.id, + type = type.toString().lowercase(Locale.getDefault()), + datetime = dt.toDate(), + json_data = "{\"local\": true, \"type\": \"${type.toString().lowercase(Locale.getDefault())}\", \"datetime\": \"${AbstractQueuedCheckIn.formatDatetime(dt.toDate())}\"}", + ) } } @@ -938,56 +961,118 @@ class AsyncCheckProvider(private val config: ConfigStore, private val 
dataStore: return results } - var search: LogicalCondition<*, *> - search = OrderPosition.SECRET.upper().like("$query%") - .or(OrderPosition.ATTENDEE_NAME.upper().like("%$query%")) - .or(OrderPosition.ATTENDEE_EMAIL.upper().like("%$query%")) - .or(Order.EMAIL.upper().like("%$query%")) - .or(Order.CODE.upper().like("$query%")) - - var listfilters: Logical<*, *>? = null + val eventFilter = mutableListOf() + val eventItemFilterEvents = mutableListOf() + val eventItemFilterItems = mutableListOf() + val eventSubEventFilterEvents = mutableListOf() + val eventSubEventFilterSubEvents = mutableListOf() + val allFilterEvents = mutableListOf() + val allFilterItems = mutableListOf() + val allFilterSubEvents = mutableListOf() for (entry in eventsAndCheckinLists.entries) { - val list = dataStore.select(CheckInList::class.java) - .where(CheckInList.SERVER_ID.eq(entry.value)) - .and(CheckInList.EVENT_SLUG.eq(entry.key)) - .get().firstOrNull() - ?: throw CheckException("Check-in list not found") - - var listfilter: Logical<*, *> = Order.EVENT_SLUG.eq(entry.key) - if (!list.all_items) { - val itemids: MutableList = ArrayList() - for (item in list.items) { - itemids.add(item.getId()) - } - listfilter = Item.ID.`in`(itemids).and(listfilter) + val list = db.checkInListQueries.selectByServerIdAndEventSlug( + server_id = entry.value, + event_slug = entry.key, + ).executeAsOneOrNull() ?: throw CheckException("Check-in list not found") + + val itemIds = if (!list.all_items) { + db.checkInListQueries.selectItemIdsForList(list.id) + .executeAsList() + .map { + // Not-null assertion needed for SQLite + it.id!! 
+ } + .ifEmpty { null } + } else { + null } - if (list.getSubevent_id() != null && list.getSubevent_id() > 0) { - listfilter = OrderPosition.SUBEVENT_ID.eq(list.getSubevent_id()).and(listfilter) + + val subEventId = if (list.subevent_id != null && list.subevent_id > 0) { + list.subevent_id + } else { + null } - if (listfilters == null) { - listfilters = listfilter + + if (itemIds != null && subEventId != null) { + allFilterEvents.add(entry.key) + allFilterItems.addAll(itemIds) + allFilterSubEvents.add(subEventId) + } else if (itemIds != null) { + eventItemFilterEvents.add(entry.key) + eventItemFilterItems.addAll(itemIds) + } else if (subEventId != null) { + eventSubEventFilterEvents.add(entry.key) + eventSubEventFilterSubEvents.add(subEventId) } else { - listfilters = listfilter.or(listfilters) + eventFilter.add(entry.key) } } - search = search.and(listfilters) - - val positions: List - // The weird typecasting is apparently due to a bug in the Java compiler -// see https://github.com/requery/requery/issues/229#issuecomment-240470748 - positions = (dataStore.select(OrderPosition::class.java) - .leftJoin(Order::class.java).on(Order.ID.eq(OrderPosition.ORDER_ID) as Condition<*, *>) - .leftJoin(Item::class.java).on(Item.ID.eq(OrderPosition.ITEM_ID)) - .where(search).limit(50).offset(50 * (page - 1)).get() as Result).toList() + + // The individual filters need a separate flag, based on whether any of their lists are empty. + // If any of them are, we also need to provide dummy values. These will not affect the + // query result, but might still be evaluated. + // All of this is done to avoid executing ` IN ()`, which is not valid SQL. + // See https://github.com/sqldelight/sql-psi/issues/285 + // and https://www.postgresql.org/docs/current/sql-expressions.html#SYNTAX-EXPRESS-EVAL. 
+ val useEventFilter = if (eventFilter.isEmpty()) { + eventFilter.add("") + false + } else { + true + } + val useEventItemFilter = if (eventItemFilterEvents.isEmpty() || eventItemFilterItems.isEmpty()) { + eventItemFilterEvents.add("") + eventItemFilterItems.add(-1L) + false + } else { + true + } + val useEventSubEventFilter = if (eventSubEventFilterEvents.isEmpty() || eventSubEventFilterSubEvents.isEmpty()) { + eventSubEventFilterEvents.add("") + eventSubEventFilterSubEvents.add(-1L) + false + } else { + true + } + val useAllFilter = if (allFilterEvents.isEmpty() || allFilterItems.isEmpty() || allFilterSubEvents.isEmpty()) { + allFilterEvents.add("") + allFilterItems.add(-1L) + allFilterSubEvents.add(-1L) + false + } else { + true + } + + val positions = db.compatQueries.searchOrderPosition( + queryStartsWith = "$query%", + queryContains = "%$query%", + use_event_filter = useEventFilter, + event_filter = eventFilter, + use_event_item_filter = useEventItemFilter, + event_item_filter_events = eventItemFilterEvents, + event_item_filter_items = eventItemFilterItems, + use_event_subevent_filter = useEventSubEventFilter, + event_subevent_filter_events = eventSubEventFilterEvents, + event_subevent_filter_subevents = eventSubEventFilterSubEvents, + use_all_filter = useAllFilter, + all_filter_events = allFilterEvents, + all_filter_items = allFilterItems, + all_filter_subevents = allFilterSubEvents, + limit = 50L, + offset = 50L * (page - 1L), + ) + .executeAsList() + .map { it.toModel() } + // TODO: search invoice_address? 
for (position in positions) { - val item = position.getItem() - val order = position.getOrder() + val order = db.orderQueries.selectById(position.orderId).executeAsOne().toModel() + val item = db.itemQueries.selectById(position.itemId).executeAsOne().toModel() val sr = TicketCheckProvider.SearchResult() sr.ticket = item.internalName val variation = try { - if (position.variationId != null && position.variationId > 0) { - item.getVariation(position.variationId) + if (position.variationServerId != null && position.variationServerId > 0) { + item.getVariation(position.variationServerId) } else { null } @@ -998,110 +1083,108 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: if (variation != null) { sr.variation = variation.stringValue } - sr.attendee_name = position.attendee_name + sr.attendee_name = position.attendeeName sr.seat = position.seatName - sr.orderCode = order.getCode() - sr.positionId = position.getPositionid() - sr.secret = position.getSecret() - val queuedCheckIns = dataStore.count(QueuedCheckIn::class.java) - .where(QueuedCheckIn.SECRET.eq(position.getSecret())) - .and(QueuedCheckIn.CHECKIN_LIST_ID.`in`(eventsAndCheckinLists.values.toList())) - .get().value().toLong() + sr.orderCode = order.code + sr.positionId = position.positionId + sr.secret = position.secret + + val queuedCheckIns = db.queuedCheckInQueries.countForSecretAndLists( + secret = position.secret, + checkin_list_ids = eventsAndCheckinLists.values.toList(), + ).executeAsOne() + val checkIns = db.checkInQueries.selectByPositionId(position.id).executeAsList().map { it.toModel() } var is_checked_in = queuedCheckIns > 0 - for (ci in position.getCheckins()) { - if (eventsAndCheckinLists.containsValue(ci.getListId())) { + for (ci in checkIns) { + if (eventsAndCheckinLists.containsValue(ci.listServerId)) { is_checked_in = true break } } sr.isRedeemed = is_checked_in - if (order.getStatus() == "p") { + if (order.status == OrderModel.Status.PAID) { sr.status = 
TicketCheckProvider.SearchResult.Status.PAID - } else if (order.getStatus() == "n") { + } else if (order.status == OrderModel.Status.PENDING) { sr.status = TicketCheckProvider.SearchResult.Status.PENDING } else { sr.status = TicketCheckProvider.SearchResult.Status.CANCELED } - var require_attention = order.isCheckin_attention + var require_attention = order.requiresCheckInAttention try { - require_attention = require_attention || item.json.optBoolean("checkin_attention", false) || variation?.isCheckin_attention == true + require_attention = require_attention || item.checkInAttention || variation?.isCheckin_attention == true } catch (e: JSONException) { sentry.captureException(e) } sr.isRequireAttention = require_attention - sr.position = position.json + sr.position = JSONObject(db.orderPositionQueries.selectById(position.id).executeAsOne().json_data) results.add(sr) } return results } - private fun basePositionQuery(lists: List, onlyCheckedIn: Boolean): WhereAndOr?> { - - var q = dataStore.count(OrderPosition::class.java).distinct() - .leftJoin(Order::class.java).on(OrderPosition.ORDER_ID.eq(Order.ID)) - .where(OrderPosition.SERVER_ID.eq(-1)) // stupid logic node just so we can dynamically add .or() below - - for (list in lists) { - var lq: Logical<*, *> = Order.EVENT_SLUG.eq(list.getEvent_slug()) - if (list.include_pending) { - lq = lq.and(Order.STATUS.`in`(listOf("p", "n"))) - } else { - lq = lq.and(Order.STATUS.eq("p").or(Order.STATUS.eq("n").and(Order.VALID_IF_PENDING.eq(true)))) - } - - if (list.getSubevent_id() != null && list.getSubevent_id() > 0) { - lq = lq.and(OrderPosition.SUBEVENT_ID.eq(list.getSubevent_id())) - } - - if (!list.isAll_items) { - val product_ids = dataStore.select(CheckInList_Item.ITEM_ID) - .where(CheckInList_Item.CHECK_IN_LIST_ID.eq(list.getId())) - .get().toList().map { it.get(0) } - lq = lq.and(OrderPosition.ITEM_ID.`in`(product_ids)) - } - - if (onlyCheckedIn) { - lq = lq.and(OrderPosition.ID.`in`( - 
dataStore.select(CheckIn.POSITION_ID) - .where(CheckIn.LIST_ID.eq(list.getServer_id())) - .and(CheckIn.TYPE.eq("entry")) - )) - } - q = q.or(lq) - } - - return q - } - @Throws(CheckException::class) override fun status(eventSlug: String, listId: Long): TicketCheckProvider.StatusResult { sentry.addBreadcrumb("provider.status", "offline status started") val items: MutableList = ArrayList() - val list = dataStore.select(CheckInList::class.java) - .where(CheckInList.SERVER_ID.eq(listId)) - .and(CheckInList.EVENT_SLUG.eq(eventSlug)) - .get().firstOrNull() - ?: throw CheckException("Check-in list not found") - val products: List - products = if (list.all_items) { - dataStore.select(Item::class.java) - .where(Item.EVENT_SLUG.eq(eventSlug)) - .get().toList() + val list = db.checkInListQueries.selectByServerIdAndEventSlug( + server_id = listId, + event_slug = eventSlug, + ).executeAsOneOrNull()?.toModel() + ?: throw CheckException("Check-in list not found") + + val products = if (list.allItems) { + db.itemQueries.selectByEventSlug(eventSlug) + .executeAsList() + .map { it.toModel() } } else { - list.items + db.itemQueries.selectForCheckInList(list.id) + .executeAsList() + .map { it.toModel() } } var sum_pos = 0 var sum_ci = 0 for (product in products) { val variations: MutableList = ArrayList() try { + val subEventId = if (list.subEventId != null && list.subEventId > 0) list.subEventId else -1L + + val notAllItems = !list.allItems + val listItemIds = if (notAllItems) { + db.checkInListQueries.selectItemIdsForList(list.id) + .executeAsList() + .map { + // Not-null assertion needed for SQLite + it.id!! + } + } else { + // Dummy ID that is not used. Required for SQLDelight to generate valid SQL. + // See comments in search(). + listOf(-1L) + } + for (`var` in product.variations) { - val position_count = basePositionQuery(listOf(list), false) - .and(OrderPosition.ITEM_ID.eq(product.id)) - .and(OrderPosition.VARIATION_ID.eq(`var`.server_id)).get()!!.value()!! 
- val ci_count = basePositionQuery(listOf(list), true) - .and(OrderPosition.ITEM_ID.eq(product.id)) - .and(OrderPosition.VARIATION_ID.eq(`var`.server_id)).get()!!.value()!! + val position_count = db.compatQueries.countOrderPositionForStatus( + event_slug = list.eventSlug, + include_pending = list.includePending, + subevent_id = subEventId, + not_all_items = notAllItems, + list_item_ids = listItemIds, + only_checked_in_list_server_id = -1L, + item_id = product.id, + variation_id = `var`.server_id, + ).executeAsOne().toInt() + + val ci_count = db.compatQueries.countOrderPositionForStatus( + event_slug = list.eventSlug, + include_pending = list.includePending, + subevent_id = subEventId, + not_all_items = notAllItems, + list_item_ids = listItemIds, + only_checked_in_list_server_id = list.serverId, + item_id = product.id, + variation_id = `var`.server_id, + ).executeAsOne().toInt() + variations.add(TicketCheckProvider.StatusResultItemVariation( `var`.server_id, `var`.stringValue, @@ -1109,17 +1192,36 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: ci_count )) } - val position_count = basePositionQuery(listOf(list), false) - .and(OrderPosition.ITEM_ID.eq(product.id)).get()!!.value()!! - val ci_count = basePositionQuery(listOf(list), true) - .and(OrderPosition.ITEM_ID.eq(product.id)).get()!!.value()!! 
+ + val position_count = db.compatQueries.countOrderPositionForStatus( + event_slug = list.eventSlug, + include_pending = list.includePending, + subevent_id = subEventId, + not_all_items = notAllItems, + list_item_ids = listItemIds, + only_checked_in_list_server_id = -1L, + item_id = product.id, + variation_id = -1L, + ).executeAsOne().toInt() + + val ci_count = db.compatQueries.countOrderPositionForStatus( + event_slug = list.eventSlug, + include_pending = list.includePending, + subevent_id = subEventId, + not_all_items = notAllItems, + list_item_ids = listItemIds, + only_checked_in_list_server_id = list.serverId, + item_id = product.id, + variation_id = -1L, + ).executeAsOne().toInt() + items.add(TicketCheckProvider.StatusResultItem( - product.getServer_id(), + product.serverId, product.internalName, position_count, ci_count, variations, - product.isAdmission + product.admission )) sum_pos += position_count sum_ci += ci_count @@ -1139,4 +1241,26 @@ class AsyncCheckProvider(private val config: ConfigStore, private val dataStore: private fun now(): DateTime { return overrideNow ?: DateTime() } + + private fun javaTimeNow(): OffsetDateTime { + val jodaNow = now() + val instant = Instant.ofEpochMilli(jodaNow.millis) + val zoneId = jodaNow.zone.toTimeZone().toZoneId() + return OffsetDateTime.ofInstant(instant, zoneId) + } + + private val CheckIn.fullDatetime : DateTime + get() { + // To avoid Joda Time code in the models, handle the case where we don't have a datetime value from JSON here + return if (this.datetime != null) { + DateTime(this.datetime.toInstant().toEpochMilli()) + } else { + val date = db.checkInQueries.selectById(this.id).executeAsOne().datetime + DateTime(date) + } + } + + // Replicates the behaviour of AbstractQueuedCheckIn.getFullDatetime() + private val QueuedCheckIn.fullDatetime : Date + get() = DateTime(this.dateTime.toInstant().toEpochMilli()).toDate() } diff --git 
a/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/OnlineCheckProvider.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/OnlineCheckProvider.kt index 74222d65..a464ce4c 100644 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/OnlineCheckProvider.kt +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/OnlineCheckProvider.kt @@ -8,10 +8,10 @@ import eu.pretix.libpretixsync.api.PretixApi import eu.pretix.libpretixsync.api.TimeoutApiException import eu.pretix.libpretixsync.config.ConfigStore import eu.pretix.libpretixsync.db.Answer -import eu.pretix.libpretixsync.db.CheckInList -import eu.pretix.libpretixsync.db.Item import eu.pretix.libpretixsync.db.NonceGenerator -import eu.pretix.libpretixsync.db.Question +import eu.pretix.libpretixsync.models.db.toModel +import eu.pretix.libpretixsync.sqldelight.Question +import eu.pretix.libpretixsync.sqldelight.SyncDatabase import eu.pretix.libpretixsync.sync.FileStorage import eu.pretix.libpretixsync.sync.OrderSyncAdapter import eu.pretix.libpretixsync.utils.cleanInput @@ -26,7 +26,7 @@ import java.util.* class OnlineCheckProvider( private val config: ConfigStore, httpClientFactory: HttpClientFactory?, - private val dataStore: BlockingEntityStore, + private val db: SyncDatabase, private val fileStore: FileStorage, private val fallback: TicketCheckProvider? 
= null, private val fallbackTimeout: Int = 30000 @@ -104,12 +104,16 @@ class OnlineCheckProvider( val required_answers: MutableList = ArrayList() for (i in 0 until response.getJSONArray("questions").length()) { val q = response.getJSONArray("questions").getJSONObject(i) - val question = Question() - question.setServer_id(q.getLong("id")) - question.isRequired = q.getBoolean("required") - question.setPosition(q.getLong("position")) - question.setJson_data(q.toString()) - required_answers.add(TicketCheckProvider.QuestionAnswer(question, "")) + + val question = Question( + server_id = q.getLong("id"), + required = q.getBoolean("required"), + position = q.getLong("position"), + json_data = q.toString(), + id = -1, + event_slug = null, + ).toModel() + required_answers.add(TicketCheckProvider.QuestionAnswer(question, q.toString(), "")) } res.requiredAnswers = required_answers } else { @@ -141,12 +145,12 @@ class OnlineCheckProvider( response.getJSONObject("list").getBoolean("include_pending") } else { // pretix < 4.12, no multi-scan supported - val list = dataStore.select(CheckInList::class.java) - .where(CheckInList.SERVER_ID.eq(eventsAndCheckinLists.values.first())) - .and(CheckInList.EVENT_SLUG.eq(eventsAndCheckinLists.keys.first())) - .get().firstOrNull() - ?: throw CheckException("Check-in list not found") - list.isInclude_pending + val list = db.checkInListQueries.selectByServerIdAndEventSlug( + server_id = eventsAndCheckinLists.values.first(), + event_slug = eventsAndCheckinLists.keys.first(), + ).executeAsOneOrNull()?.toModel() + ?: throw CheckException("Check-in list not found") + list.includePending } res.isCheckinAllowed = includePending && response.has("position") && response.getJSONObject("position").optString("order__status", "n") == "n" } else if ("product" == reason) { @@ -168,9 +172,7 @@ class OnlineCheckProvider( if (response.has("position")) { val posjson = response.getJSONObject("position") - val item = dataStore.select(Item::class.java) - 
.where(Item.SERVER_ID.eq(posjson.getLong("item"))) - .get().firstOrNull() + val item = db.itemQueries.selectByServerId(posjson.getLong("item")).executeAsOneOrNull()?.toModel() if (item != null) { res.ticket = item.internalName if (posjson.optLong("variation", 0) > 0) { @@ -203,7 +205,7 @@ class OnlineCheckProvider( val pdfdata = posjson.getJSONObject("pdf_data") if (pdfdata.has("images")) { val images = pdfdata.getJSONObject("images") - OrderSyncAdapter.updatePdfImages(dataStore, fileStore, api, posjson.getLong("id"), images) + OrderSyncAdapter.updatePdfImages(db, fileStore, api, posjson.getLong("id"), images) } } } catch (e: Exception) { @@ -218,18 +220,17 @@ class OnlineCheckProvider( val value = a.getString("answer") val q = a.get("question") if (q is JSONObject) { // pretix version supports the expand parameter - val question = Question() - question.setServer_id(q.getLong("id")) - question.isRequired = q.getBoolean("required") - question.setPosition(q.getLong("position")) - question.setJson_data(q.toString()) - if (question.isShowDuringCheckin) { - shownAnswers.add( - TicketCheckProvider.QuestionAnswer( - question, - value - ) - ) + val question = Question( + server_id = q.getLong("id"), + required = q.getBoolean("required"), + position = q.getLong("position"), + json_data = q.toString(), + id = -1, + event_slug = null, + ).toModel() + + if (question.showDuringCheckIn) { + shownAnswers.add(TicketCheckProvider.QuestionAnswer(question, q.toString(), value)) } } } @@ -335,9 +336,7 @@ class OnlineCheckProvider( for (i in 0 until resdata.length()) { val res = resdata.getJSONObject(i) val sr = TicketCheckProvider.SearchResult() - val item = dataStore.select(Item::class.java) - .where(Item.SERVER_ID.eq(res.getLong("item"))) - .get().firstOrNull() + val item = db.itemQueries.selectByServerId(res.getLong("item")).executeAsOneOrNull()?.toModel() if (item != null) { sr.ticket = item.internalName if (res.optLong("variation", 0) > 0) { @@ -386,10 +385,10 @@ class 
OnlineCheckProvider( val response = api.status(eventSlug, listId) val r = parseStatusResponse(response.data!!) - val list = dataStore.select(CheckInList::class.java) - .where(CheckInList.SERVER_ID.eq(listId)) - .and(CheckInList.EVENT_SLUG.eq(eventSlug)) - .get().firstOrNull() + val list = db.checkInListQueries.selectByServerIdAndEventSlug( + server_id = listId, + event_slug = eventSlug, + ).executeAsOneOrNull() if (list != null) { r.eventName += " – " + list.name } @@ -440,4 +439,4 @@ class OnlineCheckProvider( ) } } -} \ No newline at end of file +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/TicketCheckProvider.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/TicketCheckProvider.kt index 08b81cfe..fe31d5b8 100644 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/TicketCheckProvider.kt +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/check/TicketCheckProvider.kt @@ -2,7 +2,9 @@ package eu.pretix.libpretixsync.check import eu.pretix.libpretixsync.SentryInterface import eu.pretix.libpretixsync.db.Answer -import eu.pretix.libpretixsync.db.Question +import eu.pretix.libpretixsync.models.db.toModel +import eu.pretix.libpretixsync.sqldelight.Question +import eu.pretix.libpretixsync.models.Question as QuestionModel import org.json.JSONObject import java.util.* @@ -11,21 +13,60 @@ interface TicketCheckProvider { ENTRY, EXIT } + // Old clients expect the requery models on the API + // This class mimics the relevant fields + // TODO: List affected versions? 
+ data class QuestionOutput( + val server_id: Long, + val position: Long, + val required: Boolean, + val json_data: String, + ) { + constructor(model: QuestionModel, jsonData: String) : this( + server_id = model.serverId, + required = model.required, + position = model.position, + json_data = jsonData, + ) + + fun toModel() = Question( + server_id = server_id, + position = position, + required = required, + json_data = json_data, + id = -1L, + event_slug = null, + ).toModel() + } + class QuestionAnswer { - lateinit var question: Question + private lateinit var _question: QuestionModel + private lateinit var _jsonData: String + var currentValue: String? = null - constructor(question: Question, current_value: String?) { - this.question = question - this.currentValue = current_value - } + var question: QuestionOutput + get() = QuestionOutput(_question, _jsonData) + + set(value) { + this._question = Question( + server_id = value.server_id, + json_data = value.json_data, + position = -1, //TODO + required = false, //TODO + id = -1, + event_slug = null, + ).toModel() + } - constructor() { // required for de-serialization + constructor(question: QuestionModel, jsonData: String, currentValue: String?) { + this._question = question + this._jsonData = jsonData + this.currentValue = currentValue } - fun setCurrent_value(current_value: String?) { - currentValue = current_value - } + // required for de-serialization + constructor() {} } class CheckResult { @@ -140,4 +181,4 @@ interface TicketCheckProvider { fun status(eventSlug: String, listId: Long): StatusResult? 
fun setSentry(sentry: SentryInterface) -} \ No newline at end of file +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/database/BadgeLayoutExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/database/BadgeLayoutExtensions.kt new file mode 100644 index 00000000..cad84c08 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/database/BadgeLayoutExtensions.kt @@ -0,0 +1,16 @@ +package eu.pretix.libpretixsync.database + +import eu.pretix.libpretixsync.sqldelight.BadgeLayout +import org.json.JSONArray +import org.json.JSONObject +import eu.pretix.libpretixsync.models.BadgeLayout as BadgeLayoutModel + +fun BadgeLayout.toModel() = + BadgeLayoutModel( + id = this.id, + backgroundFilename = this.background_filename, + eventSlug = this.event_slug!!, + isDefault = this.is_default, + layout = JSONObject(this.json_data!!).optJSONArray("layout") ?: JSONArray(), + serverId = this.server_id!!, + ) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/database/TicketLayoutExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/database/TicketLayoutExtensions.kt new file mode 100644 index 00000000..5dfe46d6 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/database/TicketLayoutExtensions.kt @@ -0,0 +1,16 @@ +package eu.pretix.libpretixsync.database + +import eu.pretix.libpretixsync.sqldelight.TicketLayout +import org.json.JSONArray +import org.json.JSONObject +import eu.pretix.libpretixsync.models.TicketLayout as TicketLayoutModel + +fun TicketLayout.toModel() = + TicketLayoutModel( + id = this.id, + backgroundFilename = this.background_filename, + eventSlug = this.event_slug!!, + isDefault = this.is_default, + layout = JSONObject(this.json_data!!).optJSONArray("layout") ?: JSONArray(), + serverId = this.server_id!!, + ) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/db/AbstractQuestion.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/db/AbstractQuestion.java 
index 9271f4bd..d4b92c61 100644 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/db/AbstractQuestion.java +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/db/AbstractQuestion.java @@ -146,6 +146,7 @@ public QuestionLike getDependency() { return _resolvedDependency; } + @Override public List getDependencyValues() { try { List l = new ArrayList<>(); diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/db/ItemAddOn.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/db/ItemAddOn.java index f73fc11c..0d9cb243 100644 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/db/ItemAddOn.java +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/db/ItemAddOn.java @@ -68,4 +68,19 @@ public boolean isPriceIncluded() { public void setPriceIncluded(boolean priceIncluded) { this.priceIncluded = priceIncluded; } + + public JSONObject toJSON() { + JSONObject jsonObject = new JSONObject(); + try { + jsonObject.put("addon_category", addonCategoryId); + jsonObject.put("min_count", minCount); + jsonObject.put("max_count", maxCount); + jsonObject.put("position", position); + jsonObject.put("multi_allowed", multiAllowed); + jsonObject.put("price_included", priceIncluded); + } catch (JSONException e) { + e.printStackTrace(); + } + return jsonObject; + } } diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/BadgeLayout.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/BadgeLayout.kt new file mode 100644 index 00000000..c02c2807 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/BadgeLayout.kt @@ -0,0 +1,26 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONArray + +data class BadgeLayout( + val id: Long, + val backgroundFilename: String?, + val eventSlug: String, + val isDefault: Boolean, + val layout: JSONArray, + val serverId: Long, +) { + companion object { + fun defaultWithLayout(layout: String): BadgeLayout { + return BadgeLayout( + id = 0L, + backgroundFilename = 
null, + eventSlug = "", + isDefault = true, + layout = JSONArray(layout), + serverId = 0L, + ) + } + } +} + diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/CachedPdfImage.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/CachedPdfImage.kt new file mode 100644 index 00000000..e9ae9d10 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/CachedPdfImage.kt @@ -0,0 +1,8 @@ +package eu.pretix.libpretixsync.models + +data class CachedPdfImage( + val id: Long, + val orderPositionServerId: Long, + val etag: String, + val key: String, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Cashier.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Cashier.kt new file mode 100644 index 00000000..feb091aa --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Cashier.kt @@ -0,0 +1,80 @@ +package eu.pretix.libpretixsync.models + +import eu.pretix.libpretixsync.db.CashierLike +import org.json.JSONException +import org.json.JSONObject + +class Cashier( + val id: Long, + val active: Boolean, + private val pin: String, + name: String, + numericId: Long? = null, + userId: String? = null, + team: JSONObject? 
= null, +) : CashierLike { + private val _name = name + private val _numericId = numericId + private val _userId = userId + private val _team = team + + override fun checkPIN(pin: String): Boolean { + return if (!active) { + false + } else { + this.pin == pin + } + } + + override fun validOnDevice(device: String): Boolean { + if (!active) { + return false + } + + try { + val team: JSONObject = _team ?: return false + if (team.optBoolean("all_devices", false)) { + return true + } + val devices = team.getJSONArray("devices") + for (i in 0 until devices.length()) { + val d = devices.getString(i) + if (d == device) { + return true + } + } + return false + } catch (e: JSONException) { + return false + } + } + + override fun hasPermission(permission: String): Boolean { + val defaults: MutableMap = HashMap() + defaults["can_open_drawer"] = true + defaults["can_top_up_gift_cards"] = true + defaults["can_check_in_tickets"] = true + if (!active) { + return false + } + + try { + val team: JSONObject = _team ?: return false + return team.optBoolean(permission, defaults.getOrDefault(permission, false)) + } catch (e: JSONException) { + return false + } + } + + override fun getNumericId(): Long? { + return _numericId + } + + override fun getUserId(): String? 
{ + return _userId + } + + override fun getName(): String { + return _name + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/CheckIn.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/CheckIn.kt new file mode 100644 index 00000000..17c84321 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/CheckIn.kt @@ -0,0 +1,12 @@ +package eu.pretix.libpretixsync.models + +import java.time.OffsetDateTime + +data class CheckIn( + val id: Long, + val serverId: Long?, + val datetime: OffsetDateTime?, + val type: String?, + val listServerId: Long?, + val positionId: Long?, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/CheckInList.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/CheckInList.kt new file mode 100644 index 00000000..a5e93204 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/CheckInList.kt @@ -0,0 +1,17 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONObject + +data class CheckInList( + val id: Long, + val serverId: Long, + val allItems: Boolean, + val eventSlug: String?, + val includePending: Boolean, + val name: String?, + val subEventId: Long?, + val allowMultipleEntries: Boolean, + val allowEntryAfterExit: Boolean, + val addonMatch: Boolean, + val rules: JSONObject?, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Closing.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Closing.kt new file mode 100644 index 00000000..bd7a6c08 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Closing.kt @@ -0,0 +1,26 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONArray +import org.json.JSONObject +import java.math.BigDecimal +import java.util.Date + +data class Closing( + val id: Long, + val serverId: Long?, + val datetime: Date?, + val open: Boolean, + val firstReceiptId: Long?, + val lastReceiptId: Long?, + val paymentSum: BigDecimal?, 
+ val paymentSumCash: BigDecimal?, + val cashCounted: BigDecimal?, + val invoiceSettings: JSONObject = JSONObject(), + val cashierName: String?, + val cashierNumericId: Long?, + val cashierUserId: String?, + val sums: JSONArray = JSONArray(), + val trainingSums: JSONArray?, + val canceled: JSONArray = JSONArray(), + val datamodel: Long? +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Event.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Event.kt new file mode 100644 index 00000000..9f1df14d --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Event.kt @@ -0,0 +1,21 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONObject +import java.time.OffsetDateTime +import java.time.ZoneId + +data class Event( + val id: Long, + val name: String, + val slug: String, + val currency: String, + val isLive: Boolean, + val hasSubEvents: Boolean, + val dateFrom: OffsetDateTime, + val dateTo: OffsetDateTime? = null, + val timezone: ZoneId = ZoneId.of("UTC"), + val plugins: List = emptyList(), + val hasSeating: Boolean = false, + val seatCategoryMapping: JSONObject = JSONObject(), + val validKeys: JSONObject? 
= null, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Item.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Item.kt new file mode 100644 index 00000000..f5fff547 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Item.kt @@ -0,0 +1,138 @@ +package eu.pretix.libpretixsync.models + +import eu.pretix.libpretixsync.db.AbstractItem.MediaPolicy +import eu.pretix.libpretixsync.db.ItemAddOn +import eu.pretix.libpretixsync.db.ItemBundle +import eu.pretix.libpretixsync.db.ItemVariation +import eu.pretix.libpretixsync.db.ReusableMediaType +import org.json.JSONArray +import org.json.JSONException +import org.json.JSONObject +import java.math.BigDecimal +import java.util.Collections + + +class Item( + val id: Long, + val serverId: Long, + val active: Boolean, + val admission: Boolean, + val name: String = "", + val nameI18n: JSONObject = JSONObject(), + val description: String = "", + val descriptionI18n: JSONObject = JSONObject(), + val internalName: String = "", + val isPersonalized: Boolean = true, + val hasVariations: Boolean = false, + val hasDynamicValidityWithCustomStart: Boolean = false, + val hasDynamicValidityWithTimeOfDay: Boolean = false, + val dynamicValidityDayLimit: Long? = null, + val categoryServerId: Long? = null, + val checkInText: String? = null, + val eventSlug: String? = null, + val pictureFilename: String? = null, + val position: Long? = null, + val ticketLayoutServerId: Long? = null, + val ticketLayoutPretixPosId: Long? 
= null, + val requireVoucher: Boolean = true, + val hideWithoutVoucher: Boolean = true, + val isGiftcard: Boolean = false, + val requireBundling: Boolean = false, + val taxRuleId: Long = 0, + val defaultPrice: BigDecimal = BigDecimal("0.00"), + val hasFreePrice: Boolean = false, + val mediaPolicy: MediaPolicy = MediaPolicy.NONE, + val mediaType: ReusableMediaType = ReusableMediaType.NONE, + val generateTickets: Boolean = false, + val checkInAttention: Boolean = false, + + variations: JSONArray = JSONArray(), + bundles: JSONArray = JSONArray(), + addons: JSONArray = JSONArray(), + salesChannels: JSONArray? = null, +) { + private val _variations = variations + private val _bundles = bundles + private val _addons = addons + private val _salesChannels = salesChannels + + val variations: List + get() { + val l: MutableList = ArrayList() + val vars: JSONArray = _variations + for (i in 0 until vars.length()) { + val variation = vars.getJSONObject(i) + val v = ItemVariation() + v.isActive = variation.getBoolean("active") + v.description = variation.optJSONObject("description") + v.position = variation.getLong("position") + v.price = BigDecimal(variation.getString("price")) + v.listed_price = BigDecimal(variation.getString("price")) + v.server_id = variation.getLong("id") + v.value = variation.getJSONObject("value") + v.available_from = variation.optString("available_from") + v.available_until = variation.optString("available_until") + v.sales_channels = variation.optJSONArray("sales_channels") + v.isHide_without_voucher = variation.optBoolean("hide_without_voucher", false) + v.isCheckin_attention = variation.optBoolean("checkin_attention", false) + v.checkin_text = variation.optString("checkin_text") + l.add(v) + } + return l + } + + fun getVariation(variationServerId: Long): ItemVariation? 
= + variations.firstOrNull { it.server_id == variationServerId } + + val bundles: List + get() { + val l: MutableList = ArrayList() + val objects: JSONArray = _bundles + for (i in 0 until objects.length()) { + val obj = objects.getJSONObject(i) + val v = ItemBundle() + v.bundledItemId = obj.getLong("bundled_item") + v.bundledVariationId = + if (obj.isNull("bundled_variation")) null else obj.getLong("bundled_variation") + v.count = obj.getInt("count") + v.designatedPrice = + if (obj.isNull("designated_price")) null else BigDecimal(obj.getString("designated_price")) + l.add(v) + } + return l + } + + val addons: List + get() { + val l: MutableList = java.util.ArrayList() + val objects: JSONArray = _addons + for (i in 0 until objects.length()) { + val obj = objects.getJSONObject(i) + val v = ItemAddOn() + v.addonCategoryId = obj.getLong("addon_category") + v.minCount = obj.getInt("min_count") + v.maxCount = obj.getInt("max_count") + v.position = obj.getInt("position") + v.isMultiAllowed = obj.getBoolean("multi_allowed") + v.isPriceIncluded = obj.getBoolean("price_included") + l.add(v) + } + Collections.sort(l, Comparator.comparingInt { obj: ItemAddOn -> obj.position }) + return l + } + + val salesChannels: List? 
+ get() { + return try { + val l = mutableListOf() + val channels: JSONArray = _salesChannels ?: return null + for (i in 0 until channels.length()) { + l.add(channels.getString(i)) + } + l + } catch (e: JSONException) { + e.printStackTrace() + null + } + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ItemCategory.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ItemCategory.kt new file mode 100644 index 00000000..65acf6cd --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ItemCategory.kt @@ -0,0 +1,15 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONObject + +data class ItemCategory( + val id: Long, + val serverId: Long, + val eventSlug: String, + val isAddOn: Boolean, + val position: Long, + val name: String = "", + val nameI18n: JSONObject = JSONObject(), + val description: String? = null, + val descriptionI18n: JSONObject? = null, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Order.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Order.kt new file mode 100644 index 00000000..3adf412b --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Order.kt @@ -0,0 +1,40 @@ +package eu.pretix.libpretixsync.models + +data class Order( + val id: Long, + val eventSlug: String, + val requiresCheckInAttention: Boolean, + val status: Status, + val code: String? = null, + val checkInText: String? = null, + val testMode: Boolean = false, + val email: String? 
= null, + val requiresApproval: Boolean = false, + val validIfPending: Boolean = false, +) { + + val hasValidStatus = when (status) { + Status.PAID -> true + Status.PENDING -> validIfPending + else -> false + } + + enum class Status(val value: String) { + PENDING("n"), + PAID("p"), + EXPIRED("e"), + CANCELED("c"), + ; + + companion object { + fun fromValue(value: String) = + when (value) { + "n" -> PENDING + "p" -> PAID + "e" -> EXPIRED + "c" -> CANCELED + else -> throw IllegalArgumentException() + } + } + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/OrderPosition.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/OrderPosition.kt new file mode 100644 index 00000000..07a2d18e --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/OrderPosition.kt @@ -0,0 +1,51 @@ +package eu.pretix.libpretixsync.models + +import eu.pretix.libpretixsync.db.OrderPositionLike +import org.json.JSONObject +import java.math.BigDecimal +import java.time.OffsetDateTime + +class OrderPosition( + val id: Long, + val itemId: Long, + val serverId: Long? = null, + val orderId: Long, + val positionId: Long, + val secret: String? = null, + val subEventServerId: Long? = null, + val variationServerId: Long? = null, + val attendeeNameParts: JSONObject? = null, + val city: String? = null, + val company: String? = null, + val country: String? = null, + val email: String? = null, + val street: String? = null, + val zipcode: String? = null, + val price: BigDecimal? = null, + val taxRate: BigDecimal? = null, + val taxValue: BigDecimal? = null, + val seatName: String? = null, + val addonToServerId: Long? = null, + val blocked: Boolean = false, + val validFrom: OffsetDateTime? = null, + val validUntil: OffsetDateTime? = null, + val answers: Map? = null, + attendeeEmail: String? = null, + attendeeName: String? 
= null, +) : OrderPositionLike { + private val _attendeeEmail = attendeeEmail + private val _attendeeName = attendeeName + + override fun getJSON(): JSONObject { + // TODO: Remove RemoteObject from OrderPositionLike? + throw NotImplementedError() + } + + override fun getAttendeeName(): String { + return _attendeeName!! + } + + override fun getAttendeeEmail(): String { + return _attendeeEmail!! + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Question.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Question.kt new file mode 100644 index 00000000..a7eca346 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Question.kt @@ -0,0 +1,64 @@ +package eu.pretix.libpretixsync.models + +import eu.pretix.libpretixsync.check.QuestionType +import eu.pretix.libpretixsync.db.QuestionLike +import eu.pretix.libpretixsync.db.QuestionOption + +class Question( + val id: Long, + val serverId: Long, + val eventSlug: String?, + val position: Long, + val required: Boolean, + val askDuringCheckIn: Boolean, + val showDuringCheckIn: Boolean, + val dependencyQuestionServerId: Long?, + type: QuestionType, + question: String, + identifier: String, + options: List?, + dependencyValues: List, +) : QuestionLike() { + + private val _type: QuestionType = type + private val _question: String = question + private val _identifier: String = identifier + private val _options: List? = options + private val _dependencyValues: List = dependencyValues + + private var resolveDependencyCalled = false + private var resolvedDependency: Question? = null + + override fun getType(): QuestionType = _type + + override fun getQuestion(): String = _question + + override fun getIdentifier(): String = _identifier + + override fun getOptions(): List? = _options + + override fun requiresAnswer(): Boolean = required + + override fun getDependency(): QuestionLike? 
{ + if (!resolveDependencyCalled) { + throw IllegalStateException("Question dependencies not resolved") + } + return resolvedDependency + } + + fun resolveDependency(all: List) { + resolveDependencyCalled = true + if (dependencyQuestionServerId == null) { + resolvedDependency = null + return + } + for (q in all) { + if (q.serverId == dependencyQuestionServerId) { + resolvedDependency = q + break + } + } + } + + override fun getDependencyValues(): List = _dependencyValues +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/QueuedCheckIn.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/QueuedCheckIn.kt new file mode 100644 index 00000000..6e23f145 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/QueuedCheckIn.kt @@ -0,0 +1,15 @@ +package eu.pretix.libpretixsync.models + +import java.time.OffsetDateTime + +data class QueuedCheckIn( + val id: Long, + val answers: String?, + val checkInListId: Long?, + val dateTime: OffsetDateTime, + val eventSlug: String?, + val nonce: String?, + val secret: String?, + val sourceType: String?, + val type: String?, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/QueuedOrder.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/QueuedOrder.kt new file mode 100644 index 00000000..09f8605c --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/QueuedOrder.kt @@ -0,0 +1,11 @@ +package eu.pretix.libpretixsync.models + +data class QueuedOrder( + val id: Long, + val error: String?, + val eventSlug: String?, + val idempotencyKey: String?, + val locked: Boolean?, + val payload: String?, + val receiptId: Long?, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Quota.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Quota.kt new file mode 100644 index 00000000..4fcdbd1a --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Quota.kt @@ -0,0 +1,14 @@ +package 
eu.pretix.libpretixsync.models + +data class Quota( + val id: Long, + val serverId: Long, + val available: Boolean, + val availableNumber: Long? = null, + val size: Long? = null, + val eventSlug: String? = null, + val subEventServerId: Long? = null, + val items: List, + val variations: List, + val isUnlimited: Boolean = false, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Receipt.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Receipt.kt new file mode 100644 index 00000000..4ac6354d --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Receipt.kt @@ -0,0 +1,41 @@ +package eu.pretix.libpretixsync.models + +import java.util.Date + +data class Receipt( + val id: Long, + val eventSlug: String, + val paymentType: PaymentType, + val currency: String, + val dateTimeOpened: Date, + val dateTimeClosed: Date? = null, + val isTraining: Boolean, + val isCanceled: Boolean, + val isOpen: Boolean = false, + val isStarted: Boolean = false, + val isPrinted: Boolean = false, + val orderCode: String? = null, + val fiscalisationData: String? = null, + val fiscalisationText: String? = null, + val fiscalisationQr: String? = null, + val cashierName: String? = null, + val cashierNumericId: Long? = null, + val cashierUserId: String? = null, + val chosenCartId: String? = null, + val emailTo: String? = null, + val closing: Long? = null, + val additionalText: String? 
= null, +) { + enum class PaymentType(val value: String) { + CASH("cash"), + SUMUP("sumup"), + IZETTLE("izettle"), + IZETTLE_QRC("izettle_qrc"), + STRIPE_TERMINAL("stripe_terminal"), + TERMINAL_ZVT("terminal_zvt"), + SQUARE_POS("square_pos"), + EXTERNAL("external"), + TERMINAL_CSB60("terminal_csb60"), + ADYEN_LEGACY("adyen_legacy"), + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ReceiptLine.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ReceiptLine.kt new file mode 100644 index 00000000..fb1cca81 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ReceiptLine.kt @@ -0,0 +1,85 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONArray +import java.math.BigDecimal +import java.time.OffsetDateTime + +/** + * Note: if you change something here, don't forget to modify ReceiptWrapper.updateFromModel too + */ +data class ReceiptLine( + val id: Long, + val receiptId: Long, + val positionId: Long, + val type: Type, + val price: BigDecimal, + val listedPrice: BigDecimal? = null, + val priceAfterVoucher: BigDecimal? = null, + val customPriceInput: BigDecimal? = null, + val cartId: String? = null, + val canceled: Boolean = false, + val canceledBecauseOfReceipt: Boolean = false, + val saleText: String? = null, + val isBundled: Boolean = false, + val addonTo: Long? = null, + val remoteError: String? = null, + val voucherCode: String? = null, + val useReusableMedium: Long? = null, + val taxRate: BigDecimal? = null, + val taxRule: Long? = null, + val taxValue: BigDecimal? = null, + val eventDateFrom: OffsetDateTime? = null, + val eventDateTo: OffsetDateTime? = null, + val subEventServerId: Long? = null, + val subEventText: String? = null, + val itemServerId: Long? = null, + val variationServerId: Long? = null, + val requestedValidFrom: String? = null, + val attendeeCity: String? = null, + val attendeeCompany: String? = null, + val attendeeCountry: String? = null, + val attendeeEmail: String? 
= null, + val attendeeName: String? = null, + val attendeeStreet: String? = null, + val attendeeZipcode: String? = null, + val seatGuid: String? = null, + val seatName: String? = null, + val answers: JSONArray = JSONArray(), + val giftCardId: Long? = null, + val giftCardSecret: String? = null, + val priceCalculatedFromNet: Boolean = false, +) { + enum class Type(val value: String) { + PRODUCT_SALE("PRODUCT_SALE"), + PRODUCT_RETURN("PRODUCT_RETURN"), + CHANGE_IN("CHANGE_IN"), + CHANGE_START("CHANGE_START"), + CHANGE_OUT("CHANGE_OUT"), + CHANGE_DIFF("CHANGE_DIFF"), + GIFTCARD_SALE("GIFTCARD_SALE"), + GIFTCARD_REDEMPTION("GIFTCARD_REDEMPTION"), + GIFTCARD_PAYOUT("GIFTCARD_PAYOUT"), + PAY_ORDER("PAY_ORDER"), + PAY_ORDER_REVERSE("PAY_ORDER_REVERSE"), + REFUND_ORDER("REFUND_ORDER"), + NULL("NULL"); + + + fun isGiftcard(): Boolean { + return this.toString().startsWith("GIFTCARD_") + } + + fun isChange(): Boolean { + return this.toString().startsWith("CHANGE_") + } + } + + val hasAttendeeData: Boolean + get() = !attendeeName.isNullOrEmpty() || + !attendeeEmail.isNullOrEmpty() || + !attendeeCompany.isNullOrEmpty() || + !attendeeStreet.isNullOrEmpty() || + !attendeeZipcode.isNullOrEmpty() || + !attendeeCity.isNullOrEmpty() || + !attendeeCountry.isNullOrEmpty() +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ReceiptPayment.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ReceiptPayment.kt new file mode 100644 index 00000000..35c541a3 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ReceiptPayment.kt @@ -0,0 +1,13 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONObject +import java.math.BigDecimal + +data class ReceiptPayment( + val id: Long, + val amount: BigDecimal?, + val detailsJson: JSONObject?, + val paymentType: String?, + val receipt: Long?, + val status: String?, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ReusableMedium.kt 
b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ReusableMedium.kt new file mode 100644 index 00000000..28c6eb02 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/ReusableMedium.kt @@ -0,0 +1,13 @@ +package eu.pretix.libpretixsync.models + +data class ReusableMedium( + val id: Long, + val serverId: Long?, + val active: Boolean, + val customerId: Long?, + val expires: String?, + val identifier: String?, + val linkedGiftCardId: Long?, + val linkedOrderPositionServerId: Long?, + val type: String?, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Settings.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Settings.kt new file mode 100644 index 00000000..9597d588 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/Settings.kt @@ -0,0 +1,18 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONObject + +data class Settings( + val id: Long, + val address: String? = null, + val city: String? = null, + val country: String? = null, + val jsonData: String? = null, + val name: String? = null, + val pretixposAdditionalReceiptText: String? = null, + val slug: String? = null, + val taxId: String? = null, + val vatId: String? = null, + val zipcode: String? = null, + val json: JSONObject = JSONObject(), +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/SubEvent.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/SubEvent.kt new file mode 100644 index 00000000..11357f79 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/SubEvent.kt @@ -0,0 +1,54 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONObject +import java.math.BigDecimal +import java.time.OffsetDateTime + +data class SubEvent( + val id: Long, + val name: String, + val dateFrom: OffsetDateTime, + val dateTo: OffsetDateTime? 
= null, + val itemPriceOverrides: List, + val variationPriceOverrides: List, + val hasSeating: Boolean = false, + val seatCategoryMapping: JSONObject = JSONObject(), +) { + data class ItemOverride( + val item: Long, + val availableFrom: String? = null, + val availableUntil: String? = null, + val price: BigDecimal? = null, + val disabled: Boolean = false, + ) + + fun getPriceForItem( + item_id: Long, + original_price: BigDecimal, + ): BigDecimal { + for (or in itemPriceOverrides) { + if (or.item == item_id) { + return or.price ?: original_price + } + } + return original_price + } + + fun getPriceForVariation( + var_id: Long, + original_price: BigDecimal, + ): BigDecimal { + for (or in variationPriceOverrides) { + if (or.item == var_id) { + return or.price ?: original_price + } + } + return original_price + } + + fun getOverrideForItem(item_id: Long) = + itemPriceOverrides.firstOrNull { it.item == item_id } + + fun getOverrideForVariation(var_id: Long) = + variationPriceOverrides.firstOrNull { it.item == var_id } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/TaxRule.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/TaxRule.kt new file mode 100644 index 00000000..78609544 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/TaxRule.kt @@ -0,0 +1,10 @@ +package eu.pretix.libpretixsync.models + +import java.math.BigDecimal + +data class TaxRule( + val id: Long, + val serverId: Long, + val rate: BigDecimal = BigDecimal("0.00"), + val includesTax: Boolean = false, +) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/TicketLayout.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/TicketLayout.kt new file mode 100644 index 00000000..9c3d0c56 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/TicketLayout.kt @@ -0,0 +1,25 @@ +package eu.pretix.libpretixsync.models + +import org.json.JSONArray + +data class TicketLayout( + val id: Long, + val 
backgroundFilename: String?, + val eventSlug: String, + val isDefault: Boolean, + val layout: JSONArray, + val serverId: Long, +) { + companion object { + fun defaultWithLayout(layout: String): TicketLayout { + return TicketLayout( + id = 0L, + backgroundFilename = null, + eventSlug = "", + isDefault = true, + layout = JSONArray(layout), + serverId = 0L, + ) + } + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CachedPdfImageExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CachedPdfImageExtensions.kt new file mode 100644 index 00000000..a912e5bc --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CachedPdfImageExtensions.kt @@ -0,0 +1,12 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.CachedPdfImage +import eu.pretix.libpretixsync.models.CachedPdfImage as CachedPdfImageModel + +fun CachedPdfImage.toModel() = + CachedPdfImageModel( + id = this.id, + orderPositionServerId = orderposition_id!!, + etag = etag!!, + key = key!!, + ) diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CashierExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CashierExtensions.kt new file mode 100644 index 00000000..367c0d5e --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CashierExtensions.kt @@ -0,0 +1,19 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.Cashier +import org.json.JSONObject +import eu.pretix.libpretixsync.models.Cashier as CashierModel + +fun Cashier.toModel(): CashierModel { + val json = JSONObject(this.json_data) + + return CashierModel( + id = this.id, + numericId = this.server_id, + userId = this.userid, + name = this.name!!, + active = this.active, + pin = this.pin!!, + team = json.optJSONObject("team"), + ) +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CheckInExtensions.kt 
b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CheckInExtensions.kt new file mode 100644 index 00000000..f6dd05e0 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CheckInExtensions.kt @@ -0,0 +1,20 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.CheckIn +import eu.pretix.libpretixsync.sqldelight.SafeOffsetDateTimeMapper +import org.json.JSONObject +import eu.pretix.libpretixsync.models.CheckIn as CheckInModel + +fun CheckIn.toModel(): CheckInModel { + val json = JSONObject(this.json_data) + + return CheckInModel( + id = this.id, + serverId = this.server_id, + listServerId = this.listId, + positionId = this.position, + type = this.type, + // Use date values from JSON, as they contain time zone information + datetime = SafeOffsetDateTimeMapper.decode(json, "datetime"), + ) +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CheckInListExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CheckInListExtensions.kt new file mode 100644 index 00000000..657278ee --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/CheckInListExtensions.kt @@ -0,0 +1,56 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.CheckInList +import org.json.JSONException +import org.json.JSONObject +import eu.pretix.libpretixsync.models.CheckInList as CheckInListModel + +fun CheckInList.toModel(): CheckInListModel { + val json = JSONObject(this.json_data!!) 
+ + return CheckInListModel( + id = this.id, + serverId = this.server_id!!, + allItems = this.all_items, + eventSlug = this.event_slug, + includePending = this.include_pending, + name = this.name, + subEventId = this.subevent_id, + allowMultipleEntries = parseAllowMultipleEntries(json), + allowEntryAfterExit = parseAllowEntryAfterExit(json), + addonMatch = parseAddonMatch(json), + rules = parseRules(json), + ) +} + +fun parseAllowMultipleEntries(json: JSONObject): Boolean { + return try { + json.getBoolean("allow_multiple_entries") + } catch (e: JSONException) { + false + } +} + +fun parseAllowEntryAfterExit(json: JSONObject): Boolean { + return try { + json.getBoolean("allow_entry_after_exit") + } catch (e: JSONException) { + false + } +} + +fun parseAddonMatch(json: JSONObject): Boolean { + return try { + json.optBoolean("addon_match", false) + } catch (e: JSONException) { + false + } +} + +private fun parseRules(json: JSONObject): JSONObject? { + return try { + json.optJSONObject("rules") + } catch (e: JSONException) { + null + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ClosingExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ClosingExtensions.kt new file mode 100644 index 00000000..ef2d5a25 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ClosingExtensions.kt @@ -0,0 +1,28 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.Closing +import eu.pretix.libpretixsync.models.Closing as ClosingModel +import org.json.JSONArray +import org.json.JSONObject + +fun Closing.toModel(): ClosingModel { + return ClosingModel( + id = this.id, + serverId = this.server_id, + open = this.open_, + firstReceiptId = this.first_receipt, + lastReceiptId = this.last_receipt, + paymentSum = this.payment_sum, + paymentSumCash = this.payment_sum_cash, + cashCounted = this.cash_counted, + invoiceSettings = this.invoice_settings?.let { JSONObject(it) } ?: 
JSONObject(), + datetime = this.datetime, + cashierName = this.cashier_name, + cashierNumericId = this.cashier_numericid, + cashierUserId = this.cashier_userid, + sums = this.json_data?.let { JSONObject(it).optJSONArray("sums") } ?: JSONArray(), + trainingSums = this.json_data?.let { JSONObject(it).optJSONArray("training_sums") }, + canceled = this.json_data?.let { JSONObject(it).optJSONArray("canceled") } ?: JSONArray(), + datamodel = this.json_data?.let { JSONObject(it).optLong("datamodel") } ?: 0L, + ) +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/EventExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/EventExtensions.kt new file mode 100644 index 00000000..dbc2e04a --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/EventExtensions.kt @@ -0,0 +1,78 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.Event +import eu.pretix.libpretixsync.sqldelight.SafeOffsetDateTimeMapper +import eu.pretix.libpretixsync.utils.I18nString +import org.json.JSONException +import org.json.JSONObject +import java.time.ZoneId +import eu.pretix.libpretixsync.models.Event as EventModel + +fun Event.toModel(): EventModel { + val json = JSONObject(this.json_data!!) 
+ + return EventModel( + id = this.id, + name = getName(json), + slug = this.slug!!, + currency = this.currency!!, + isLive = this.live, + hasSubEvents = this.has_subevents, + // Use date values from JSON, as they contain time zone information + dateFrom = SafeOffsetDateTimeMapper.decode(json, "date_from")!!, + dateTo = SafeOffsetDateTimeMapper.decode(json, "date_to"), + timezone = getTimezone(json), + plugins = parsePlugins(json), + hasSeating = parseHasSeating(json), + seatCategoryMapping = json.getJSONObject("seat_category_mapping"), + validKeys = parseValidKeys(json), + ) +} + +private fun getName(json: JSONObject): String = + try { + I18nString.toString(json.getJSONObject("name")) + } catch (e: JSONException) { + e.printStackTrace() + "" + } + +private fun getTimezone(json: JSONObject): ZoneId = + try { + ZoneId.of(json.optString("timezone", "UTC")) + } catch (e: JSONException) { + ZoneId.of("UTC") + } + +private fun parsePlugins(json: JSONObject): List { + try { + val plugins = json.optJSONArray("plugins") + if (plugins == null || plugins.length() == 0) { + return emptyList() + } + + val res = mutableListOf() + for (i in 0 until plugins.length()) { + res.add(plugins.getString(i)) + } + return res + } catch (e: JSONException) { + return emptyList() + } +} + +private fun parseHasSeating(json: JSONObject): Boolean { + return try { + !json.isNull("seating_plan") + } catch (e: JSONException) { + false + } +} + +fun parseValidKeys(json: JSONObject): JSONObject? 
{ + return try { + json.optJSONObject("valid_keys") + } catch (e: JSONException) { + null + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ItemCategoryExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ItemCategoryExtensions.kt new file mode 100644 index 00000000..ee784a6b --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ItemCategoryExtensions.kt @@ -0,0 +1,45 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.ItemCategory +import eu.pretix.libpretixsync.utils.I18nString +import org.json.JSONException +import org.json.JSONObject +import eu.pretix.libpretixsync.models.ItemCategory as ItemCategoryModel + +fun ItemCategory.toModel(): ItemCategoryModel { + val json = JSONObject(this.json_data!!) + + return ItemCategoryModel( + id = this.id, + serverId = this.server_id!!, + eventSlug = this.event_slug!!, + isAddOn = this.is_addon, + position = this.position!!, + name = parseName(json), + nameI18n = json.getJSONObject("name"), + description = parseDescription(json), + descriptionI18n = json.optJSONObject("description") ?: null, + ) +} + +private fun parseName(json: JSONObject): String { + return try { + I18nString.toString(json.getJSONObject("name")) + } catch (e: JSONException) { + e.printStackTrace() + "" + } +} + +private fun parseDescription(json: JSONObject): String? 
{ + return try { + if (!json.isNull("description")) { + I18nString.toString(json.getJSONObject("description")) ?: "" + } else { + null + } + } catch (e: JSONException) { + e.printStackTrace() + null + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ItemExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ItemExtensions.kt new file mode 100644 index 00000000..6331a189 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ItemExtensions.kt @@ -0,0 +1,239 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.models.Item as ItemModel +import eu.pretix.libpretixsync.db.AbstractItem.MediaPolicy +import eu.pretix.libpretixsync.db.ReusableMediaType +import eu.pretix.libpretixsync.sqldelight.Item +import eu.pretix.libpretixsync.sqldelight.isGenerateTickets +import eu.pretix.libpretixsync.utils.I18nString +import org.json.JSONException +import org.json.JSONObject +import java.math.BigDecimal + +fun Item.toModel(): ItemModel { + val json = JSONObject(this.json_data) + + return ItemModel( + id = this.id, + serverId = this.server_id, + active = this.active, + admission = this.admission, + name = parseName(json), + nameI18n = json.getJSONObject("name"), + description = parseDescription(json), + descriptionI18n = json.optJSONObject("description") ?: JSONObject(), + internalName = parseInternalName(json), + isPersonalized = parseIsPersonalized(json, this.admission), + hasVariations = parseHasVariations(json), + hasDynamicValidityWithCustomStart = parseHasDynamicValidityWithCustomStart(json), + hasDynamicValidityWithTimeOfDay = parseHasDynamicValidityWithTimeOfDay(json), + dynamicValidityDayLimit = parseDynamicValidityDayLimit(json), + categoryServerId = this.category_id, + checkInText = this.checkin_text, + eventSlug = this.event_slug, + pictureFilename = this.picture_filename, + position = this.position, + ticketLayoutServerId = this.ticket_layout_id, + 
ticketLayoutPretixPosId = this.ticket_layout_pretixpos_id, + requireVoucher = parseRequireVoucher(json), + hideWithoutVoucher = parseHideWithoutVoucher(json), + isGiftcard = parseIsGiftcard(json), + requireBundling = parseRequireBundling(json), + taxRuleId = parseTaxRuleId(json), + defaultPrice = parseDefaultPrice(json), + hasFreePrice = parseHasFreePrice(json), + mediaPolicy = parseMediaPolicy(json), + mediaType = parseMediaType(json), + generateTickets = this.isGenerateTickets, + variations = json.getJSONArray("variations"), + bundles = json.getJSONArray("bundles"), + addons = json.getJSONArray("addons"), + salesChannels = json.optJSONArray("sales_channels"), + checkInAttention = json.optBoolean("checkin_attention", false), + ) +} + +private fun parseInternalName(json: JSONObject): String { + return try { + val internal: String = json.optString("internal_name") + if (internal != null && !internal.isEmpty() && "null" != internal) { + internal + } else I18nString.toString(json.getJSONObject("name")) + } catch (e: JSONException) { + e.printStackTrace() + "" + } +} + +private fun parseName(json: JSONObject): String { + return try { + I18nString.toString(json.getJSONObject("name")) + } catch (e: JSONException) { + e.printStackTrace() + "" + } +} + +private fun parseDescription(json: JSONObject): String { + return try { + if (!json.isNull("description")) { + I18nString.toString(json.getJSONObject("description")) ?: "" + } else { + "" + } + } catch (e: JSONException) { + e.printStackTrace() + "" + } +} + +private fun parseHasVariations(json: JSONObject): Boolean { + return try { + json.getBoolean("has_variations") + } catch (e: JSONException) { + e.printStackTrace() + false + } +} + +private fun parseHasDynamicValidityWithCustomStart(jo: JSONObject): Boolean { + return try { + if (jo.optString("validity_mode", "") != "dynamic") { + false + } else jo.optBoolean("validity_dynamic_start_choice", false) + } catch (e: JSONException) { + e.printStackTrace() + false + } +} + 
+private fun parseHasDynamicValidityWithTimeOfDay(jo: JSONObject): Boolean { + return try { + if (!jo.isNull("validity_dynamic_duration_months") && jo.optLong( + "validity_dynamic_duration_months", + 0 + ) > 0 || !jo.isNull("validity_dynamic_duration_days") && jo.optLong( + "validity_dynamic_duration_days", + 0 + ) > 0 + ) { + false + } else true + } catch (e: JSONException) { + e.printStackTrace() + false + } +} + +private fun parseDynamicValidityDayLimit(jo: JSONObject): Long? { + return try { + if (jo.has("validity_dynamic_start_choice_day_limit") && !jo.isNull("validity_dynamic_start_choice_day_limit")) { + jo.getLong("validity_dynamic_start_choice_day_limit") + } else null + } catch (e: JSONException) { + e.printStackTrace() + null + } +} + +private fun parseIsPersonalized(j: JSONObject, admission: Boolean): Boolean { + return try { + if (j.has("personalized")) { + j.getBoolean("personalized") + } else { + admission + } + } catch (e: JSONException) { + e.printStackTrace() + true + } +} + +private fun parseRequireVoucher(json: JSONObject): Boolean { + return try { + json.getBoolean("require_voucher") + } catch (e: JSONException) { + e.printStackTrace() + true + } +} + +private fun parseHideWithoutVoucher(json: JSONObject): Boolean { + return try { + json.getBoolean("hide_without_voucher") + } catch (e: JSONException) { + e.printStackTrace() + true + } +} + +private fun parseIsGiftcard(json: JSONObject): Boolean { + return try { + json.getBoolean("issue_giftcard") + } catch (e: JSONException) { + e.printStackTrace() + false + } +} + +private fun parseTaxRuleId(json: JSONObject): Long { + return try { + json.optLong("tax_rule") + } catch (e: JSONException) { + e.printStackTrace() + 0 + } +} + +private fun parseDefaultPrice(json: JSONObject): BigDecimal { + return try { + BigDecimal(json.getString("default_price")) + } catch (e: JSONException) { + e.printStackTrace() + BigDecimal(0.00) + } +} + +private fun parseHasFreePrice(json: JSONObject): Boolean { + return 
try { + if (json.isNull("free_price")) { + false + } else json.getBoolean("free_price") + } catch (e: JSONException) { + e.printStackTrace() + false + } +} + +private fun parseMediaPolicy(json: JSONObject): MediaPolicy { + return try { + val mp: String = json.optString("media_policy") ?: return MediaPolicy.NONE + if (mp == "reuse") return MediaPolicy.REUSE + if (mp == "new") return MediaPolicy.NEW + if (mp == "reuse_or_new") MediaPolicy.REUSE_OR_NEW else MediaPolicy.NONE + } catch (e: JSONException) { + e.printStackTrace() + MediaPolicy.NONE + } +} + +private fun parseMediaType(json: JSONObject): ReusableMediaType { + return try { + val mp: String = json.optString("media_type") ?: return ReusableMediaType.NONE + if (mp == "barcode") return ReusableMediaType.BARCODE + if (mp == "nfc_uid") return ReusableMediaType.NFC_UID + if (mp == "nfc_mf0aes") ReusableMediaType.NFC_MF0AES else ReusableMediaType.UNSUPPORTED + } catch (e: JSONException) { + e.printStackTrace() + ReusableMediaType.NONE + } +} + +private fun parseRequireBundling(json: JSONObject): Boolean { + return try { + json.getBoolean("require_bundling") + } catch (e: JSONException) { + e.printStackTrace() + false + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/OrderExensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/OrderExensions.kt new file mode 100644 index 00000000..5bf1abaf --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/OrderExensions.kt @@ -0,0 +1,44 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.models.Order +import eu.pretix.libpretixsync.sqldelight.Orders +import eu.pretix.libpretixsync.sqldelight.SafeOffsetDateTimeMapper +import org.joda.time.DateTime +import org.joda.time.format.ISODateTimeFormat +import org.json.JSONException +import org.json.JSONObject + +fun Orders.toModel(): Order { + val json = JSONObject(this.json_data!!) 
+ + return Order( + id = this.id, + eventSlug = this.event_slug!!, + code = this.code, + checkInText = this.checkin_text, + requiresCheckInAttention = this.checkin_attention, + status = Order.Status.fromValue(this.status!!), + testMode = parseTestMode(json), + email = this.email, + requiresApproval = parseRequiresApproval(json), + validIfPending = this.valid_if_pending ?: false, + ) +} + +private fun parseTestMode(json: JSONObject): Boolean { + try { + return json.getBoolean("testmode") + } catch (e: JSONException) { + e.printStackTrace() + return false + } +} + +private fun parseRequiresApproval(json: JSONObject): Boolean { + try { + return json.getBoolean("require_approval") + } catch (e: JSONException) { + e.printStackTrace() + return false + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/OrderPositionExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/OrderPositionExtensions.kt new file mode 100644 index 00000000..f226cdfb --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/OrderPositionExtensions.kt @@ -0,0 +1,119 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.OrderPosition +import eu.pretix.libpretixsync.sqldelight.SafeOffsetDateTimeMapper +import org.json.JSONArray +import org.json.JSONException +import org.json.JSONObject +import java.math.BigDecimal +import eu.pretix.libpretixsync.models.OrderPosition as OrderPositionModel + +fun OrderPosition.toModel(): OrderPositionModel { + val json = JSONObject(this.json_data) + + return OrderPositionModel( + id = this.id, + serverId = this.server_id, + itemId = this.item!!, + orderId = this.order_ref!!, + positionId = this.positionid!!, + secret = this.secret, + subEventServerId = this.subevent_id, + variationServerId = this.variation_id, + attendeeNameParts = json.optJSONObject("attendee_name_parts"), + city = json.optString("city", null), + company = json.optString("company", null), + 
country = json.optString("country", null),
        email = json.optString("email", null),
        street = json.optString("street", null),
        zipcode = json.optString("zipcode", null),
        price = parsePrice(json),
        taxRate = parseTaxRate(json),
        taxValue = parseTaxValue(json),
        seatName = parseSeatName(json),
        addonToServerId = parseAddonToServerId(json),
        blocked = parseBlocked(json),
        validFrom = SafeOffsetDateTimeMapper.decode(json, "valid_from"),
        validUntil = SafeOffsetDateTimeMapper.decode(json, "valid_until"),
        answers = parseAnswers(json),
        attendeeEmail = this.attendee_email,
        attendeeName = this.attendee_name,
    )
}

private fun parsePrice(json: JSONObject): BigDecimal? {
    try {
        return BigDecimal(json.getString("price"))
    } catch (e: JSONException) {
        e.printStackTrace()
        return null
    }
}

private fun parseTaxRate(json: JSONObject): BigDecimal? {
    try {
        return BigDecimal(json.getString("tax_rate"))
    } catch (e: JSONException) {
        e.printStackTrace()
        return null
    }
}

private fun parseTaxValue(json: JSONObject): BigDecimal? {
    try {
        return BigDecimal(json.getString("tax_value"))
    } catch (e: JSONException) {
        e.printStackTrace()
        return null
    }
}

// Display name of the seat, if this position has one assigned.
private fun parseSeatName(json: JSONObject): String? {
    try {
        val seat = json.optJSONObject("seat")
        if (seat != null) {
            return seat.getString("name")
        }
    } catch (e: JSONException) {
    }
    return null
}

// Server ID of the parent position; 0/absent means this is not an add-on.
private fun parseAddonToServerId(json: JSONObject): Long? {
    try {
        val value = json.optLong("addon_to", 0L)
        if (value == 0L) {
            return null
        }
        return value
    } catch (e: JSONException) {
        e.printStackTrace()
        return null
    }
}

// A position counts as blocked when the "blocked" key is present and non-null,
// regardless of its value.
fun parseBlocked(json: JSONObject): Boolean {
    try {
        if (!json.has("blocked") || json.isNull("blocked")) {
            return false
        }
        return true
    } catch (e: JSONException) {
        e.printStackTrace()
        return false
    }
}

// Stripped generics restored: maps question server ID -> answer string.
private fun parseAnswers(json: JSONObject): Map<Long, String>? {
    try {
        val arr: JSONArray = json.getJSONArray("answers")
        val res: MutableMap<Long, String> = HashMap()
        for (i in 0 until arr.length()) {
            res[arr.getJSONObject(i).getLong("question")] = arr.getJSONObject(i).getString("answer")
        }
        return res
    } catch (e: JSONException) {
        e.printStackTrace()
        return null
    }
}
diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QuestionExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QuestionExtensions.kt
new file mode 100644
index 00000000..499c460f
--- /dev/null
+++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QuestionExtensions.kt
@@ -0,0 +1,135 @@
package eu.pretix.libpretixsync.models.db

import eu.pretix.libpretixsync.models.Question as QuestionModel
import eu.pretix.libpretixsync.check.QuestionType
import eu.pretix.libpretixsync.db.QuestionOption
import eu.pretix.libpretixsync.sqldelight.Question
import eu.pretix.libpretixsync.utils.I18nString
import org.json.JSONArray
import org.json.JSONException
import org.json.JSONObject

fun Question.toModel(): QuestionModel {
    val json = JSONObject(this.json_data!!)
+ + return QuestionModel( + id = this.id, + serverId = this.server_id!!, + eventSlug = this.event_slug, + position = this.position!!, + required = this.required, + askDuringCheckIn = parseAskDuringCheckIn(json), + showDuringCheckIn = parseShowDuringCheckIn(json), + dependencyQuestionServerId = parseDependencyQuestionId(json), + dependencyValues = parseDependencyValues(json), + type = parseType(json), + identifier = parseIdentifier(json), + question = parseQuestion(json), + options = parseOptions(json), + ) +} + +private fun parseAskDuringCheckIn(json: JSONObject): Boolean { + return try { + json.getBoolean("ask_during_checkin") + } catch (e: JSONException) { + e.printStackTrace() + false + } +} + +private fun parseShowDuringCheckIn(json: JSONObject): Boolean { + return try { + json.getBoolean("show_during_checkin") + } catch (e: JSONException) { + e.printStackTrace() + false + } +} + +private fun parseType(json: JSONObject): QuestionType { + return try { + QuestionType.valueOf(json.getString("type")) + } catch (e: JSONException) { + QuestionType.T + } catch (e: IllegalArgumentException) { + QuestionType.T + } +} + +private fun parseIdentifier(json: JSONObject): String { + return try { + json.getString("identifier") + } catch (e: JSONException) { + e.printStackTrace() + "" + } +} + +private fun parseQuestion(json: JSONObject): String { + return try { + I18nString.toString(json.getJSONObject("question")) + } catch (e: JSONException) { + e.printStackTrace() + "" + } +} + +private fun parseOptions(json: JSONObject): List? { + val opts: MutableList = ArrayList() + return try { + val arr: JSONArray = json.getJSONArray("options") + for (i in 0 until arr.length()) { + val oobj = arr.getJSONObject(i) + var answ: String? 
answ = try {
                // Answer may be localized (object) or a plain string.
                I18nString.toString(oobj.getJSONObject("answer"))
            } catch (e: JSONException) {
                oobj.getString("answer")
            }
            opts.add(
                QuestionOption(
                    oobj.getLong("id"),
                    oobj.getLong("position"),
                    oobj.getString("identifier"),
                    answ
                )
            )
        }
        opts
    } catch (e: JSONException) {
        e.printStackTrace()
        null
    }
}

private fun parseDependencyQuestionId(json: JSONObject): Long? {
    return try {
        // Use getLong instead of optLong like in AbstractQuestion
        // We want an explicit null here
        if (json.isNull("dependency_question")) {
            null
        } else {
            json.getLong("dependency_question")
        }
    } catch (e: JSONException) {
        e.printStackTrace()
        null
    }
}

// Stripped generics restored: values of the dependency question that enable
// this question; empty list when absent.
private fun parseDependencyValues(json: JSONObject): List<String> {
    try {
        val l: MutableList<String> = java.util.ArrayList()
        val a = json.optJSONArray("dependency_values")
        if (a != null) {
            for (i in 0 until a.length()) {
                l.add(a.getString(i))
            }
        }
        return l
    } catch (e: JSONException) {
        e.printStackTrace()
        return java.util.ArrayList()
    }
}
diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QueuedCheckInExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QueuedCheckInExtensions.kt
new file mode 100644
index 00000000..ed67e909
--- /dev/null
+++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QueuedCheckInExtensions.kt
@@ -0,0 +1,30 @@
package eu.pretix.libpretixsync.models.db

import eu.pretix.libpretixsync.sqldelight.QueuedCheckIn
import java.time.OffsetDateTime
import java.time.ZoneOffset
import java.util.Date
import eu.pretix.libpretixsync.models.QueuedCheckIn as QueuedCheckInModel

fun QueuedCheckIn.toModel(): QueuedCheckInModel {
    return QueuedCheckInModel(
        id = this.id,
        answers = this.answers,
        checkInListId = this.checkinListId,
        dateTime = parseDateTime(this.datetime!!, this.datetime_string),
        eventSlug = this.event_slug,
        nonce = this.nonce,
        secret = this.secret,
        sourceType =
this.source_type, + type = this.type, + ) +} + +private fun parseDateTime(datetime: Date, dateTimeString: String?): OffsetDateTime { + return if (dateTimeString != null && dateTimeString != "") { + OffsetDateTime.parse(dateTimeString) + } else { + // Assume UTC if we have no additional info + datetime.toInstant().atOffset(ZoneOffset.UTC) + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QueuedOrderExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QueuedOrderExtensions.kt new file mode 100644 index 00000000..9456db9a --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QueuedOrderExtensions.kt @@ -0,0 +1,16 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.QueuedOrder +import eu.pretix.libpretixsync.models.QueuedOrder as QueuedOrderModel + +fun QueuedOrder.toModel(): QueuedOrderModel { + return QueuedOrderModel( + id = this.id, + error = this.error, + eventSlug = this.event_slug, + idempotencyKey = this.idempotency_key, + locked = this.locked, + payload = this.payload, + receiptId = this.receipt, + ) +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QuotaExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QuotaExtensions.kt new file mode 100644 index 00000000..b330660f --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/QuotaExtensions.kt @@ -0,0 +1,50 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.Quota +import eu.pretix.libpretixsync.models.Quota as QuotaModel +import org.json.JSONException +import org.json.JSONObject + +fun Quota.toModel(): QuotaModel { + val json = JSONObject(this.json_data!!) 
+ + return QuotaModel( + id = this.id, + serverId = this.server_id!!, + available = this.available!!, + availableNumber = this.available_number, + size = this.size, + eventSlug = this.event_slug, + subEventServerId = this.subevent_id, + items = parseItems(json), + variations = parseVariations(json), + isUnlimited = parseIsUnlimited(json), + ) +} + +private fun parseItems(json: JSONObject): List { + val items = json.getJSONArray("items") + val res = mutableListOf() + for (i in 0 until items.length()) { + res.add(items.getLong(i)) + } + return res +} + +private fun parseVariations(json: JSONObject): List { + val items = json.getJSONArray("variations") + val res = mutableListOf() + for (i in 0 until items.length()) { + res.add(items.getLong(i)) + } + return res +} + +private fun parseIsUnlimited(json: JSONObject): Boolean { + return try { + json.isNull("size") + } catch (e: JSONException) { + e.printStackTrace() + false + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ReceiptExtensions.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ReceiptExtensions.kt new file mode 100644 index 00000000..544e230f --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ReceiptExtensions.kt @@ -0,0 +1,30 @@ +package eu.pretix.libpretixsync.models.db + +import eu.pretix.libpretixsync.sqldelight.Receipt +import eu.pretix.libpretixsync.models.Receipt as ReceiptModel + +fun Receipt.toModel() = + ReceiptModel( + id = id, + eventSlug = event_slug!!, + paymentType = ReceiptModel.PaymentType.valueOf(payment_type!!.uppercase()), + currency = currency, + orderCode = order_code, + dateTimeOpened = datetime_opened!!, + dateTimeClosed = datetime_closed, + fiscalisationData = fiscalisation_data, + fiscalisationText = fiscalisation_text, + fiscalisationQr = fiscalisation_qr, + isCanceled = canceled, + isTraining = training, + isOpen = open_ == true, + isStarted = started == true, + isPrinted = printed, + cashierName = 
// NOTE(review): reconstructed from a collapsed diff; generic type arguments stripped
// during extraction (e.g. on mutableListOf) have been restored below.

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ReceiptLineExtensions.kt ──
package eu.pretix.libpretixsync.models.db

import eu.pretix.libpretixsync.sqldelight.ReceiptLine
import eu.pretix.libpretixsync.sqldelight.SafeOffsetDateTimeMapper
import org.json.JSONArray
import eu.pretix.libpretixsync.models.ReceiptLine as ReceiptLineModel

/**
 * Maps a SQLDelight [ReceiptLine] row to the domain model.
 *
 * The `!!` assertions mirror the DB contract: receipt, positionid, type and price are
 * required for any persisted line — TODO confirm against the .sq schema.
 */
fun ReceiptLine.toModel() =
    ReceiptLineModel(
        id = id,
        receiptId = receipt!!,
        positionId = positionid!!,
        type = ReceiptLineModel.Type.valueOf(type!!),
        price = price!!,
        listedPrice = listed_price,
        priceAfterVoucher = price_after_voucher,
        customPriceInput = custom_price_input,
        cartId = cart_id,
        canceled = canceled,
        canceledBecauseOfReceipt = canceled_because_of_receipt ?: false,
        saleText = sale_text,
        isBundled = is_bundled ?: false,
        addonTo = addon_to,
        remoteError = remote_error,
        voucherCode = voucher_code,
        useReusableMedium = use_reusable_medium,
        taxRate = tax_rate,
        taxRule = tax_rule,
        taxValue = tax_value,
        // Dates are stored as text; the mapper tolerates null and the literal "null".
        eventDateFrom = SafeOffsetDateTimeMapper.decode(event_date_from),
        eventDateTo = SafeOffsetDateTimeMapper.decode(event_date_to),
        subEventServerId = subevent_id,
        subEventText = subevent_text,
        itemServerId = item_id,
        variationServerId = variation_id,
        requestedValidFrom = requested_valid_from,
        attendeeCity = attendee_city,
        attendeeCompany = attendee_company,
        attendeeCountry = attendee_country,
        attendeeEmail = attendee_email,
        attendeeName = attendee_name,
        attendeeStreet = attendee_street,
        attendeeZipcode = attendee_zipcode,
        seatGuid = seat_guid,
        seatName = seat_name,
        answers = JSONArray(answers ?: "[]"),
        giftCardId = gift_card_id,
        giftCardSecret = gift_card_secret,
        priceCalculatedFromNet = price_calculated_from_net == true,
    )

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ReceiptPaymentExtensions.kt ──
package eu.pretix.libpretixsync.models.db

import eu.pretix.libpretixsync.sqldelight.ReceiptPayment
import org.json.JSONObject
import eu.pretix.libpretixsync.models.ReceiptPayment as ReceiptPaymentModel

/** Maps a SQLDelight [ReceiptPayment] row to the domain model, parsing the detail JSON lazily. */
fun ReceiptPayment.toModel(): ReceiptPaymentModel {
    return ReceiptPaymentModel(
        id = this.id,
        amount = this.amount,
        detailsJson = this.detailsJson?.let { JSONObject(it) },
        paymentType = payment_type,
        receipt = this.receipt,
        status = this.status,
    )
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/ReusableMedium.kt ──
package eu.pretix.libpretixsync.models.db

import eu.pretix.libpretixsync.sqldelight.ReusableMedium
import eu.pretix.libpretixsync.models.ReusableMedium as ReusableMediumModel

/** Maps a SQLDelight [ReusableMedium] row to the domain model; server_id is required. */
fun ReusableMedium.toModel(): ReusableMediumModel {
    return ReusableMediumModel(
        id = this.id,
        serverId = this.server_id!!,
        active = this.active,
        customerId = this.customer_id,
        expires = this.expires,
        identifier = this.identifier,
        linkedGiftCardId = this.linked_giftcard_id,
        linkedOrderPositionServerId = this.linked_orderposition_id,
        type = this.type,
    )
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/SettingsExtensions.kt ──
package eu.pretix.libpretixsync.models.db

import eu.pretix.libpretixsync.models.Settings as SettingsModel
import eu.pretix.libpretixsync.sqldelight.Settings
import org.json.JSONObject

/**
 * Maps a SQLDelight [Settings] row to the domain model.
 *
 * NOTE(review): `json_data!!` means a row with NULL json_data crashes here — confirm the
 * column is effectively NOT NULL for synced settings.
 */
fun Settings.toModel() =
    SettingsModel(
        id = id,
        address = address,
        city = city,
        country = country,
        jsonData = json_data,
        name = name,
        pretixposAdditionalReceiptText = pretixpos_additional_receipt_text,
        slug = slug,
        taxId = tax_id,
        vatId = vat_id,
        zipcode = zipcode,
        json = JSONObject(json_data!!),
    )

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/SubEventExtensions.kt ──
package eu.pretix.libpretixsync.models.db

import eu.pretix.libpretixsync.sqldelight.SafeOffsetDateTimeMapper
import eu.pretix.libpretixsync.sqldelight.SubEvent
import eu.pretix.libpretixsync.utils.I18nString
import org.json.JSONException
import org.json.JSONObject
import java.math.BigDecimal
import eu.pretix.libpretixsync.models.SubEvent as SubEventModel

/**
 * Maps a SQLDelight [SubEvent] row to the domain model.
 *
 * Date values are read from the JSON payload rather than the DB columns because the JSON
 * retains time zone information.
 */
fun SubEvent.toModel(): SubEventModel {
    val json = JSONObject(this.json_data!!)

    return SubEventModel(
        id = this.id,
        name = parseName(json),
        // Use date values from JSON, as they contain time zone information
        dateFrom = SafeOffsetDateTimeMapper.decode(json, "date_from")!!,
        dateTo = SafeOffsetDateTimeMapper.decode(json, "date_to"),
        itemPriceOverrides = parseItemPriceOverrides(json),
        variationPriceOverrides = parseVariationPriceOverrides(json),
        hasSeating = parseHasSeating(json),
        // NOTE(review): throws JSONException if the key is absent — confirm the API always
        // sends seat_category_mapping.
        seatCategoryMapping = json.getJSONObject("seat_category_mapping"),
    )
}

/** Best-effort i18n name; falls back to "" on malformed JSON. */
private fun parseName(json: JSONObject): String {
    return try {
        I18nString.toString(json.getJSONObject("name"))
    } catch (e: JSONException) {
        e.printStackTrace()
        ""
    }
}

/**
 * Shared parser for the two structurally identical override arrays
 * (item_price_overrides / variation_price_overrides); only the array key and the
 * id field name differ.
 */
private fun parseOverrides(
    json: JSONObject,
    arrayKey: String,
    idKey: String,
): List<SubEventModel.ItemOverride> {
    val arr = json.getJSONArray(arrayKey)
    val res = mutableListOf<SubEventModel.ItemOverride>()
    for (i in 0 until arr.length()) {
        val or = arr.getJSONObject(i)
        res.add(
            SubEventModel.ItemOverride(
                item = or.getLong(idKey),
                availableFrom = or.optString("available_from", null),
                availableUntil = or.optString("available_to", null),
                price = if (or.isNull("price")) null else BigDecimal(or.optString("price")),
                disabled = or.optBoolean("disabled", false),
            ),
        )
    }
    return res
}

private fun parseItemPriceOverrides(json: JSONObject) =
    parseOverrides(json, "item_price_overrides", "item")

private fun parseVariationPriceOverrides(json: JSONObject) =
    parseOverrides(json, "variation_price_overrides", "variation")

/** A sub-event has seating iff "seating_plan" is present and non-null. */
private fun parseHasSeating(json: JSONObject): Boolean {
    return try {
        !json.isNull("seating_plan")
    } catch (e: JSONException) {
        false
    }
}
// NOTE(review): reconstructed from a collapsed diff; the ColumnAdapter<IN, OUT> type
// arguments stripped during extraction have been restored on every adapter class.

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/models/db/TaxRuleExtensions.kt ──
package eu.pretix.libpretixsync.models.db

import eu.pretix.libpretixsync.sqldelight.TaxRule
import org.json.JSONException
import org.json.JSONObject
import java.math.BigDecimal
import eu.pretix.libpretixsync.models.TaxRule as TaxRuleModel

/** Maps a SQLDelight [TaxRule] row to the domain model; rate/includesTax come from JSON. */
fun TaxRule.toModel(): TaxRuleModel {
    val json = JSONObject(this.json_data!!)

    return TaxRuleModel(
        id = this.id,
        serverId = this.server_id!!,
        rate = parseRate(json),
        includesTax = parseIncludesTax(json),
    )
}

/** Tax rate from the JSON payload; 0 on malformed data. */
private fun parseRate(json: JSONObject): BigDecimal =
    try {
        BigDecimal(json.getString("rate"))
    } catch (e: JSONException) {
        e.printStackTrace()
        // FIX: was BigDecimal(0.00) — never construct BigDecimal from a double literal.
        BigDecimal.ZERO
    }

/** Whether prices include tax; defaults to false on malformed data. */
private fun parseIncludesTax(json: JSONObject): Boolean =
    try {
        json.getBoolean("price_includes_tax")
    } catch (e: JSONException) {
        e.printStackTrace()
        false
    }

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/AndroidUtilDateAdapter.kt ──
package eu.pretix.libpretixsync.sqldelight

import app.cash.sqldelight.ColumnAdapter
import java.time.Instant
import java.time.OffsetDateTime
import java.time.ZoneId
import java.time.format.DateTimeFormatter
import java.util.Date

/** Stores java.util.Date as an ISO-8601 offset string (UTC on encode). */
class AndroidUtilDateAdapter : ColumnAdapter<Date, String> {
    private val df = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssXXXXX")

    override fun decode(databaseValue: String): Date {
        // Use the default formatter (ISO_OFFSET_DATE_TIME) when decoding to be on the safe side
        // in case we encounter valid ISO strings with a slightly different format (e.g. including
        // milliseconds)
        return Date(OffsetDateTime.parse(databaseValue).toInstant().toEpochMilli())
    }

    override fun encode(value: Date): String =
        df.format(Instant.ofEpochMilli(value.time).atZone(ZoneId.of("Z")))
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/BigDecimalAdapter.kt ──
package eu.pretix.libpretixsync.sqldelight

import app.cash.sqldelight.ColumnAdapter
import java.math.BigDecimal
import java.math.RoundingMode

/** Stores BigDecimal money values as Double; refuses to encode more than 2 decimal places. */
class BigDecimalAdapter : ColumnAdapter<BigDecimal, Double> {
    override fun decode(databaseValue: Double): BigDecimal =
        BigDecimal.valueOf(databaseValue).setScale(2, RoundingMode.HALF_UP)

    override fun encode(value: BigDecimal): Double {
        require(value.scale() <= 2) {
            "Should not store value $value in database, too much precision"
        }
        return value.toDouble()
    }
}

/**
 * Converts a Double database value to BigDecimal
 *
 * Applies the same conversion as BigDecimalAdapter.
 * Should be used for values that do not go through adapters (such as SUM() values).
 */
fun Double.toScaledBigDecimal(): BigDecimal =
    BigDecimal.valueOf(this).setScale(2, RoundingMode.HALF_UP)

/** Like [toScaledBigDecimal] but maps null to 0.00. */
fun Double?.toScaledBigDecimalOrZero(): BigDecimal =
    this?.toScaledBigDecimal() ?: BigDecimal.ZERO.setScale(2, RoundingMode.HALF_UP)

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/ClosingExtensions.kt ──
package eu.pretix.libpretixsync.sqldelight

import org.json.JSONObject
import java.math.RoundingMode
import java.text.DateFormat
import java.text.SimpleDateFormat
import java.util.Locale
import java.util.TimeZone

/** Serializes a [Closing] row to the JSON shape expected by the pretix closings API. */
fun Closing.toJSON(): JSONObject {
    // Locale.US guarantees ASCII digits regardless of the device locale.
    val df: DateFormat = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.US)
    df.timeZone = TimeZone.getTimeZone("UTC")

    val jo = JSONObject()
    jo.put("closing_id", id)
    jo.put("first_receipt", first_receipt)
    jo.put("last_receipt", last_receipt)
    jo.put("payment_sum", payment_sum?.setScale(2, RoundingMode.HALF_UP))
    jo.put("payment_sum_cash", payment_sum_cash?.setScale(2, RoundingMode.HALF_UP))
    jo.put("cash_counted", cash_counted?.setScale(2, RoundingMode.HALF_UP))
    // FIX: guard against a null datetime instead of NPE-ing inside df.format().
    jo.put("datetime", datetime?.let { df.format(it) } ?: JSONObject.NULL)
    jo.put("invoice_settings", invoice_settings)
    jo.put("cashier", cashier_numericid)
    jo.put("data", if (json_data != null) JSONObject(json_data) else JSONObject())
    return jo
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/ItemExtensions.kt ──
package eu.pretix.libpretixsync.sqldelight

import eu.pretix.libpretixsync.utils.I18nString
import org.json.JSONException
import org.json.JSONObject

/** i18n display name from json_data; "" on malformed JSON. */
val Item.name: String
    get() {
        val json = JSONObject(json_data)
        return try {
            I18nString.toString(json.getJSONObject("name"))
        } catch (e: JSONException) {
            e.printStackTrace()
            ""
        }
    }

/** Minimum amount per order, or null when unset/malformed. */
val Item.minPerOrder: Int?
    get() {
        val json = JSONObject(json_data)
        return try {
            if (json.isNull("min_per_order")) null else json.optInt("min_per_order")
        } catch (e: JSONException) {
            e.printStackTrace()
            null
        }
    }

/** Maximum amount per order, or null when unset/malformed (style unified with minPerOrder). */
val Item.maxPerOrder: Int?
    get() {
        val json = JSONObject(json_data)
        return try {
            if (json.isNull("max_per_order")) null else json.optInt("max_per_order")
        } catch (e: JSONException) {
            e.printStackTrace()
            null
        }
    }

/** Whether tickets should be generated; defaults to true when unset or malformed. */
val Item.isGenerateTickets: Boolean
    get() = try {
        val json = JSONObject(json_data)
        if (json.isNull("generate_tickets")) true else json.getBoolean("generate_tickets")
    } catch (e: JSONException) {
        e.printStackTrace()
        true
    }

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/JavaUtilDateAdapter.kt ──
package eu.pretix.libpretixsync.sqldelight

import app.cash.sqldelight.ColumnAdapter
import java.time.Instant
import java.time.LocalDateTime
import java.time.ZoneId
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
import java.util.Date

/** Stores java.util.Date as a zone-less "yyyy-MM-dd HH:mm:ss.SSS" string interpreted as UTC. */
class JavaUtilDateAdapter : ColumnAdapter<Date, String> {
    private val df = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS")

    override fun decode(databaseValue: String): Date =
        Date(LocalDateTime.parse(databaseValue, df).toInstant(ZoneOffset.UTC).toEpochMilli())

    override fun encode(value: Date): String =
        df.format(Instant.ofEpochMilli(value.time).atZone(ZoneId.of("Z")))
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/PostgresIdAdapter.kt ──
package eu.pretix.libpretixsync.sqldelight

import app.cash.sqldelight.ColumnAdapter

/**
 * Column adapter that converts serial / integer ID column values to Long
 *
 * Needed since requery generated all Postgres tables with a 4 byte ID while the rest of the code
 * expects 8 byte IDs / Kotlin Long values.
 * Since only `id` columns (i.e. local IDs) are affected, the conversion should be reasonably safe.
 * If any of the values turn out to be too large for an integer, this adapter will throw an exception.
 */
class PostgresIdAdapter : ColumnAdapter<Long, Int> {
    override fun decode(databaseValue: Int): Long = databaseValue.toLong()

    override fun encode(value: Long): Int {
        require(value in Int.MIN_VALUE..Int.MAX_VALUE) { "ID value exceeds integer range" }
        return value.toInt()
    }
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/PostgresJavaUtilDateAdapter.kt ──
package eu.pretix.libpretixsync.sqldelight

import app.cash.sqldelight.ColumnAdapter
import java.time.LocalDate
import java.time.ZoneId
import java.util.Date

/** Maps a date-only Postgres column to java.util.Date, assuming midnight UTC. */
class PostgresJavaUtilDateAdapter : ColumnAdapter<Date, LocalDate> {

    override fun decode(databaseValue: LocalDate): Date {
        // The columns generated by requery do not contain time information.
        // As a workaround, assume midnight.
        //
        // Once the data is no longer accessed by requery, the columns should be migrated
        // to a data type with a time portion.
        return Date.from(databaseValue.atStartOfDay(ZoneId.of("UTC")).toInstant())
    }

    override fun encode(value: Date): LocalDate =
        value.toInstant().atZone(ZoneId.of("UTC")).toLocalDate()
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/PostgresLongBooleanAdapter.kt ──
package eu.pretix.libpretixsync.sqldelight

import app.cash.sqldelight.ColumnAdapter

/** Maps 0L/1L columns to Boolean; any other value is a data error and throws. */
class PostgresLongBooleanAdapter : ColumnAdapter<Boolean, Long> {
    override fun decode(databaseValue: Long): Boolean =
        when (databaseValue) {
            1L -> true
            0L -> false
            else -> throw IllegalArgumentException("Value must be 0L or 1L")
        }

    override fun encode(value: Boolean): Long = if (value) 1L else 0L
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/ReceiptExtensions.kt ──
package eu.pretix.libpretixsync.sqldelight

import org.json.JSONObject
import java.text.SimpleDateFormat
import java.util.Locale
import java.util.TimeZone

/** Serializes a [Receipt] row to the JSON shape expected by the pretix receipts API. */
fun Receipt.toJSON(): JSONObject {
    // Locale.US guarantees ASCII digits regardless of the device locale.
    val df = SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.US)
    df.timeZone = TimeZone.getTimeZone("UTC")

    val jo = JSONObject()
    jo.put("receipt_id", id)
    jo.put("event", event_slug ?: JSONObject.NULL)
    jo.put("order", order_code ?: JSONObject.NULL)
    jo.put(
        "order_full",
        if (order_code != null) event_slug?.uppercase(Locale.getDefault()) + "-" + order_code else "-",
    )
    jo.put("open", this.open_)
    jo.put("payment_type", payment_type)
    jo.put(
        "datetime_opened",
        if (datetime_opened != null) df.format(datetime_opened) else JSONObject.NULL,
    )
    jo.put(
        "datetime_closed",
        if (datetime_closed != null) df.format(datetime_closed) else JSONObject.NULL,
    )
    jo.put("closing_id", closing)
    jo.put("canceled", canceled)
    jo.put("currency", currency)
    jo.put("printed", printed)
    jo.put("email_to", email_to)
    // Legacy rows may store the literal string "null"; treat it like an absent value.
    jo.put(
        "payment_data",
        if (payment_data.isNullOrEmpty() || payment_data == "null") JSONObject() else JSONObject(payment_data),
    )
    jo.put(
        "fiscalisation_data",
        if (fiscalisation_data.isNullOrEmpty() || fiscalisation_data == "null") JSONObject() else JSONObject(fiscalisation_data),
    )
    jo.put(
        "fiscalisation_text",
        if (fiscalisation_text.isNullOrEmpty() || fiscalisation_text == "null") "" else fiscalisation_text,
    )
    jo.put(
        "fiscalisation_qr",
        if (fiscalisation_qr.isNullOrEmpty() || fiscalisation_qr == "null") "" else fiscalisation_qr,
    )
    jo.put("cashier", cashier_numericid)
    jo.put("training", training)
    jo.put("additional_text", additional_text)
    return jo
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/ReceiptLineExtensions.kt ──
package eu.pretix.libpretixsync.sqldelight

import org.json.JSONObject
import java.math.RoundingMode

/** Serializes a [ReceiptLine] row to the JSON shape expected by the pretix receipts API. */
fun ReceiptLine.toJSON(): JSONObject {
    val jo = JSONObject()
    jo.put("id", id)
    jo.put("type", type)
    jo.put("position_id", positionid)
    jo.put("canceled", canceled)
    jo.put("canceled_because_of_receipt", canceled_because_of_receipt)
    jo.put("price_calculated_from_net", price_calculated_from_net)
    // Money values are normalized to two decimal places for the API.
    jo.put("listed_price", listed_price?.setScale(2, RoundingMode.HALF_UP))
    jo.put("price_after_voucher", price_after_voucher?.setScale(2, RoundingMode.HALF_UP))
    jo.put("custom_price_input", custom_price_input?.setScale(2, RoundingMode.HALF_UP))
    jo.put("voucher_code", voucher_code)
    jo.put("price", price?.setScale(2, RoundingMode.HALF_UP))
    jo.put("tax_rate", tax_rate?.setScale(2, RoundingMode.HALF_UP))
    jo.put("tax_value", tax_value?.setScale(2, RoundingMode.HALF_UP) ?: "0.00")
    jo.put("tax_rule", tax_rule ?: JSONObject.NULL)
    jo.put("secret", secret)
    jo.put("seat", seat_guid ?: JSONObject.NULL)
    jo.put("subevent", subevent_id)
    // The length > 5 checks filter out junk values like "" or "null" in legacy rows.
    jo.put(
        "event_date_from",
        if (event_date_from != null && event_date_from.length > 5) event_date_from else JSONObject.NULL,
    )
    jo.put(
        "event_date_to",
        if (event_date_to != null && event_date_to.length > 5) event_date_to else JSONObject.NULL,
    )
    jo.put(
        "subevent_text",
        if (subevent_text != null && subevent_text.isNotEmpty() && subevent_text != "null") subevent_text else JSONObject.NULL,
    )
    jo.put("item", if (item_id != null && item_id != 0L) item_id else JSONObject.NULL)
    jo.put("variation", variation_id)
    jo.put("answers", answers)
    jo.put("sale_text", sale_text)
    jo.put("addon_to", addon_to ?: JSONObject.NULL)
    jo.put("is_bundled", is_bundled)
    jo.put("attendee_name", attendee_name)
    jo.put("attendee_email", attendee_email)
    jo.put("attendee_company", attendee_company)
    jo.put("attendee_street", attendee_street)
    jo.put("attendee_zipcode", attendee_zipcode)
    jo.put("attendee_city", attendee_city)
    jo.put("attendee_country", attendee_country)
    jo.put("requested_valid_from", requested_valid_from)
    jo.put("use_reusable_medium", use_reusable_medium)
    jo.put("gift_card", gift_card_id)
    jo.put("gift_card_secret", gift_card_secret)
    return jo
}

// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sqldelight/SafeOffsetDateTimeMapper.kt ──
package eu.pretix.libpretixsync.sqldelight

import org.json.JSONObject
import java.time.OffsetDateTime
import java.time.format.DateTimeFormatter

/**
 * Mapper for OffsetDateTime values that are stored as text in the database.
 *
 * This class has the same job as a ColumnAdapter, but since some of the values require mapping from
 * non-null database values to null (e.g. the string "null"), we cannot use custom column types
 * with ColumnAdapters.
 */
object SafeOffsetDateTimeMapper {
    private val df = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssXXXXX")

    /** Null and the literal string "null" both decode to null. */
    fun decode(databaseValue: String?): OffsetDateTime? =
        when (databaseValue) {
            null, "null" -> null
            else -> OffsetDateTime.parse(databaseValue)
        }

    /** Decodes [key] from [json]; JSON null maps to Kotlin null. */
    fun decode(json: JSONObject, key: String): OffsetDateTime? =
        if (json.isNull(key)) null else decode(json.getString(key))

    fun encode(value: OffsetDateTime?): String? =
        when (value) {
            null -> null
            else -> {
                // Use .format() instead of .toString() to get a consistent length
                // OffsetDateTime.toString() omits portions of the date that are zero
                value.format(df)
            }
        }
}
// ── libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/AllEventsSyncAdapter.kt ──
// NOTE(review): reconstructed from a collapsed diff; the stripped generic arguments on
// BaseDownloadSyncAdapter / MutableSet / List have been restored.
package eu.pretix.libpretixsync.sync

import app.cash.sqldelight.TransactionWithoutReturn
import app.cash.sqldelight.db.QueryResult
import eu.pretix.libpretixsync.api.PretixApi
import eu.pretix.libpretixsync.sqldelight.Event
import eu.pretix.libpretixsync.sqldelight.SyncDatabase
import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback
import org.joda.time.format.ISODateTimeFormat
import org.json.JSONObject

/**
 * Downloads the organizer-wide event list and mirrors it into the local `event` table.
 *
 * Events are keyed by slug (not by numeric server id), hence the String ID type parameter.
 */
class AllEventsSyncAdapter(
    db: SyncDatabase,
    fileStorage: FileStorage,
    api: PretixApi,
    syncCycleId: String,
    feedback: ProgressFeedback?,
) : BaseDownloadSyncAdapter<Event, String>(
    db = db,
    api = api,
    syncCycleId = syncCycleId,
    eventSlug = "__all__",
    fileStorage = fileStorage,
    feedback = feedback,
) {
    override fun getResourceName(): String = "events"

    override fun getUrl(): String = api.organizerResourceUrl(getResourceName())

    override fun getId(obj: Event): String = obj.slug!!

    override fun getId(obj: JSONObject): String = obj.getString("slug")

    override fun getJSON(obj: Event): JSONObject = JSONObject(obj.json_data!!)

    override fun queryKnownIDs(): MutableSet<String> {
        val res = mutableSetOf<String>()
        db.eventQueries.selectSlugs().execute { cursor ->
            while (cursor.next().value) {
                val id = cursor.getString(0)
                    ?: throw RuntimeException("slug column not available")

                res.add(id)
            }
            QueryResult.Unit
        }

        return res
    }

    // The mandatory "date_from" field, parsed as ISO-8601.
    private fun parseDateFrom(jsonobj: JSONObject) =
        ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")).toDate()

    // The optional "date_to" field; null when absent or JSON null.
    private fun parseDateTo(jsonobj: JSONObject) =
        if (jsonobj.isNull("date_to")) {
            null
        } else {
            ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate()
        }

    override fun insert(jsonobj: JSONObject) {
        db.eventQueries.insert(
            currency = jsonobj.getString("currency"),
            date_from = parseDateFrom(jsonobj),
            date_to = parseDateTo(jsonobj),
            has_subevents = jsonobj.getBoolean("has_subevents"),
            json_data = jsonobj.toString(),
            live = jsonobj.getBoolean("live"),
            slug = jsonobj.getString("slug"),
        )
    }

    override fun update(obj: Event, jsonobj: JSONObject) {
        db.eventQueries.updateFromJson(
            currency = jsonobj.getString("currency"),
            date_to = parseDateTo(jsonobj),
            date_from = parseDateFrom(jsonobj),
            has_subevents = jsonobj.getBoolean("has_subevents"),
            json_data = jsonobj.toString(),
            live = jsonobj.getBoolean("live"),
            slug = obj.slug,
        )
    }

    override fun delete(key: String) {
        db.eventQueries.deleteBySlug(key)
    }

    override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) {
        db.eventQueries.transaction(false, body)
    }

    override fun runBatch(parameterBatch: List<String>): List<Event> =
        db.eventQueries.selectBySlugList(parameterBatch).executeAsList()
}
the *first* page in the result set to make - // sure we don't miss anything between this and the next run. - // - // If the download failed, completed will be false. In case this was a full fetch - // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be - // able to continue properly. - if (firstResponseTimestamp != null) { - if (resourceSyncStatus == null) { - resourceSyncStatus = new ResourceSyncStatus(); - resourceSyncStatus.setResource("subevents"); - resourceSyncStatus.setEvent_slug("__all__"); - if (completed) { - resourceSyncStatus.setStatus("complete"); - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } - } else { - if (completed) { - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } - } - } else if (completed && resourceSyncStatus != null) { - resourceSyncStatus.setStatus("complete"); - store.update(resourceSyncStatus); - } - firstResponseTimestamp = null; - } - } - - protected boolean deleteUnseen() { - return rlm == null; - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(SubEvent.SERVER_ID) - .get().iterator(); - } - - @Override - public void updateObject(SubEvent obj, JSONObject jsonobj) throws JSONException { - obj.setServer_id(jsonobj.getLong("id")); - obj.setEvent_slug(jsonobj.getString("event")); - obj.setDate_from(ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")).toDate()); - if (!jsonobj.isNull("date_to")) { - obj.setDate_to(ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate()); - } - obj.setActive(jsonobj.getBoolean("active")); - obj.setJson_data(jsonobj.toString()); - } - - @Override - protected String getUrl() { - return api.organizerResourceUrl(getResourceName()); - } - - @Override - String getResourceName() { - return "subevents"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return 
obj.getLong("id"); - } - - @Override - Long getId(SubEvent obj) { - return obj.getServer_id(); - } - - @Override - SubEvent newEmptyObject() { - return new SubEvent(); - } - - @Override - public CloseableIterator runBatch(List parameterBatch) { - return store.select(SubEvent.class) - .where(SubEvent.SERVER_ID.in(parameterBatch)) - .get().iterator(); - } - - @Override - protected JSONObject downloadPage(String url, boolean isFirstPage) throws ApiException, ResourceNotModified { - if (isFirstPage) { - rlm = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq("subevents")) - .limit(1) - .get().firstOrNull(); - } - - if (rlm != null) { - // This resource has been fetched before. - // Diff to last time - - // Ordering is crucial here: Only because the server returns the objects in the - // order of modification we can be sure that we don't miss orders created in between our - // paginated requests. If an object were to be modified between our fetch of page 1 - // and 2 that originally wasn't part of the result set, we won't see it (as it will - // be inserted on page 1), but we'll see it the next time, and we will see some - // duplicates on page 2, but we don't care. The important part is that nothing gets - // lost "between the pages". If an order of page 2 gets modified and moves to page - // one while we fetch page 2, again, we won't see it and we'll see some duplicates, - // but the next sync will fix it since we always fetch our diff compared to the time - // of the first page. 
- try { - if (!url.contains("modified_since")) { - if (url.contains("?")) { - url += "&"; - } else { - url += "?"; - } - url += "ordering=-last_modified&modified_since=" + URLEncoder.encode(rlm.getLast_modified(), "UTF-8"); - } - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - } - - PretixApi.ApiResponse apiResponse = api.fetchResource(url); - if (isFirstPage) { - firstResponseTimestamp = apiResponse.getResponse().header("X-Page-Generated"); - } - return apiResponse.getData(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/AllSubEventsSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/AllSubEventsSyncAdapter.kt new file mode 100644 index 00000000..c8cf124a --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/AllSubEventsSyncAdapter.kt @@ -0,0 +1,231 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.ApiException +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.api.ResourceNotModified +import eu.pretix.libpretixsync.sqldelight.ResourceSyncStatus +import eu.pretix.libpretixsync.sqldelight.SubEvent +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.joda.time.format.ISODateTimeFormat +import org.json.JSONException +import org.json.JSONObject +import java.io.UnsupportedEncodingException +import java.net.URLEncoder +import java.util.concurrent.ExecutionException + +class AllSubEventsSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, +) : BaseDownloadSyncAdapter( + db = db, + api = api, + syncCycleId = syncCycleId, + eventSlug = "__all__", + fileStorage = fileStorage, + feedback = feedback, +) { + private var firstResponseTimestamp: String? 
= null + private var rlm: ResourceSyncStatus? = null + + override fun getResourceName(): String = "subevents" + + override fun getUrl(): String { + return api.organizerResourceUrl(getResourceName()) + } + + override fun getId(obj: SubEvent): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: SubEvent): JSONObject = JSONObject(obj.json_data!!) + + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.subEventQueries.selectServerIds().execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + val dateFrom = + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")) + .toDate() + + val dateTo = if (!jsonobj.isNull("date_to")) { + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate() + } else { + null + } + + db.subEventQueries.insert( + active = jsonobj.getBoolean("active"), + date_from = dateFrom, + date_to = dateTo, + event_slug = jsonobj.getString("event"), + json_data = jsonobj.toString(), + server_id = jsonobj.getLong("id"), + ) + } + + override fun update(obj: SubEvent, jsonobj: JSONObject) { + val dateFrom = + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")) + .toDate() + + val dateTo = if (!jsonobj.isNull("date_to")) { + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate() + } else { + null + } + + db.subEventQueries.updateFromJson( + active = jsonobj.getBoolean("active"), + date_from = dateFrom, + date_to = dateTo, + event_slug = jsonobj.getString("event"), + json_data = jsonobj.toString(), + id = obj.id, + ) + } + + override fun delete(key: Long) { + db.subEventQueries.deleteByServerId(key) + } + + override fun runInTransaction(body: 
TransactionWithoutReturn.() -> Unit) { + db.subEventQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.subEventQueries.selectByServerIdList(parameterBatch).executeAsList() + + @Throws( + JSONException::class, + ApiException::class, + ExecutionException::class, + InterruptedException::class + ) + override fun download() { + var completed = false + try { + super.download() + completed = true + } finally { + val resourceSyncStatus = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = "subevents", + event_slug = "__all__", + ).executeAsOneOrNull() + + // We need to cache the response timestamp of the *first* page in the result set to make + // sure we don't miss anything between this and the next run. + // + // If the download failed, completed will be false. In case this was a full fetch + // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be + // able to continue properly. + if (firstResponseTimestamp != null) { + if (resourceSyncStatus == null) { + val status = if (completed) { + "complete" + } else { + null + } + + val lastModified = if (completed) { + firstResponseTimestamp + } else { + null + } + + db.resourceSyncStatusQueries.insert( + event_slug = "__all__", + last_modified = lastModified, + meta = null, + resource = "subevents", + status = status, + ) + } else { + if (completed) { + db.resourceSyncStatusQueries.updateLastModified( + last_modified = firstResponseTimestamp, + id = resourceSyncStatus.id, + ) + } + } + } else if (completed && resourceSyncStatus != null) { + db.resourceSyncStatusQueries.updateStatus( + status = "complete", + id = resourceSyncStatus.id + ) + } + firstResponseTimestamp = null + } + } + + @Throws(ApiException::class, ResourceNotModified::class) + override fun downloadPage(url: String, isFirstPage: Boolean): JSONObject? 
{ + if (isFirstPage) { + rlm = + db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = "subevents", + event_slug = "__all__", + ).executeAsOneOrNull() + } + + var resUrl = url + rlm?.let { + // This resource has been fetched before. + // Diff to last time + + // Ordering is crucial here: Only because the server returns the objects in the + // order of modification we can be sure that we don't miss orders created in between our + // paginated requests. If an object were to be modified between our fetch of page 1 + // and 2 that originally wasn't part of the result set, we won't see it (as it will + // be inserted on page 1), but we'll see it the next time, and we will see some + // duplicates on page 2, but we don't care. The important part is that nothing gets + // lost "between the pages". If an order of page 2 gets modified and moves to page + // one while we fetch page 2, again, we won't see it and we'll see some duplicates, + // but the next sync will fix it since we always fetch our diff compared to the time + // of the first page. + + try { + if (!resUrl.contains("modified_since")) { + resUrl += if (resUrl.contains("?")) { + "&" + } else { + "?" 
+ } + resUrl += "ordering=-last_modified&modified_since=" + URLEncoder.encode( + it.last_modified, + "UTF-8" + ) + } + } catch (e: UnsupportedEncodingException) { + e.printStackTrace() + } + } + + val apiResponse = api.fetchResource(resUrl) + if (isFirstPage) { + firstResponseTimestamp = apiResponse.response.header("X-Page-Generated") + } + return apiResponse.data + } + + override fun deleteUnseen(): Boolean { + return rlm == null + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutItemSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutItemSyncAdapter.java deleted file mode 100644 index eae6c9dc..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutItemSyncAdapter.java +++ /dev/null @@ -1,102 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.BadgeLayout; -import eu.pretix.libpretixsync.db.BadgeLayoutItem; -import eu.pretix.libpretixsync.db.Item; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class BadgeLayoutItemSyncAdapter extends BaseDownloadSyncAdapter { - private Map itemCache = new HashMap<>(); - private Map layoutCache = new HashMap<>(); - - public BadgeLayoutItemSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - } - - private Item getItem(long id) { - if (itemCache.size() == 0) { - List items = store - .select(Item.class) - .where(Item.EVENT_SLUG.eq(eventSlug)) - .get().toList(); - for (Item item : items) { - 
itemCache.put(item.getServer_id(), item); - } - } - return itemCache.get(id); - } - - private BadgeLayout getLayout(long id) { - if (layoutCache.size() == 0) { - List items = store - .select(BadgeLayout.class) - .where(BadgeLayout.EVENT_SLUG.eq(eventSlug)) - .get().toList(); - for (BadgeLayout item : items) { - layoutCache.put(item.getServer_id(), item); - } - } - return layoutCache.get(id); - } - - @Override - public void updateObject(BadgeLayoutItem obj, JSONObject jsonobj) throws JSONException { - obj.setItem(getItem(jsonobj.getLong("item"))); - if (!jsonobj.isNull("layout")) { - obj.setLayout(getLayout(jsonobj.getLong("layout"))); - } else { - obj.setLayout(null); - } - obj.setServer_id(jsonobj.getLong("id")); - obj.setJson_data(jsonobj.toString()); - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(BadgeLayoutItem.class) - .leftJoin(Item.class).on(Item.ID.eq(BadgeLayoutItem.ITEM_ID)) - .where(Item.EVENT_SLUG.eq(eventSlug)) - .and(BadgeLayoutItem.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(BadgeLayoutItem.SERVER_ID) - .leftJoin(Item.class).on(Item.ID.eq(BadgeLayoutItem.ITEM_ID)) - .where(Item.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - String getResourceName() { - return "badgeitems"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(BadgeLayoutItem obj) { - return obj.getServer_id(); - } - - @Override - BadgeLayoutItem newEmptyObject() { - return new BadgeLayoutItem(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutItemSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutItemSyncAdapter.kt new file mode 100644 index 00000000..1f3cdc05 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutItemSyncAdapter.kt @@ -0,0 +1,123 @@ +package 
eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.sqldelight.BadgeLayoutItem +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.json.JSONObject + + +class BadgeLayoutItemSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, +) : BaseDownloadSyncAdapter( + db = db, + api = api, + syncCycleId = syncCycleId, + eventSlug = eventSlug, + fileStorage = fileStorage, + feedback = feedback, +) { + + private val itemCache: MutableMap = HashMap() + private val layoutCache: MutableMap = HashMap() + + override fun getResourceName(): String = "badgeitems" + + override fun getId(obj: BadgeLayoutItem): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: BadgeLayoutItem): JSONObject = JSONObject(obj.json_data!!) 
+ + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.badgeLayoutItemQueries.selectServerIdsByEventSlug(event_slug = eventSlug) + .execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + val item = getItemId(jsonobj.getLong("item")) + + val layout = if (!jsonobj.isNull("layout")) { + getLayoutId(jsonobj.getLong("layout")) + } else { + null + } + + db.badgeLayoutItemQueries.insert( + json_data = jsonobj.toString(), + server_id = jsonobj.getLong("id"), + item = item, + layout = layout, + ) + } + + override fun update(obj: BadgeLayoutItem, jsonobj: JSONObject) { + val item = getItemId(jsonobj.getLong("item")) + + val layout = if (!jsonobj.isNull("layout")) { + getLayoutId(jsonobj.getLong("layout")) + } else { + null + } + + db.badgeLayoutItemQueries.updateFromJson( + json_data = jsonobj.toString(), + item = item, + layout = layout, + id = obj.id, + ) + } + + private fun getItemId(id: Long): Long? { + if (itemCache.isEmpty()) { + val items = db.itemQueries.selectByEventSlug(eventSlug).executeAsList() + for (item in items) { + itemCache[item.server_id] = item.id + } + } + return itemCache[id] + } + + private fun getLayoutId(id: Long): Long? { + if (layoutCache.isEmpty()) { + val layouts = db.badgeLayoutQueries.selectByEventSlug(eventSlug).executeAsList() + for (layout in layouts) { + layoutCache[layout.server_id!!] 
= layout.id + } + } + return layoutCache[id] + } + + override fun delete(key: Long) { + db.badgeLayoutItemQueries.deleteByServerId(key) + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.badgeLayoutItemQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.badgeLayoutItemQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + ).executeAsList() + +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutSyncAdapter.java deleted file mode 100644 index e07c40a9..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutSyncAdapter.java +++ /dev/null @@ -1,119 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import eu.pretix.libpretixsync.api.ApiException; -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.BadgeLayout; -import eu.pretix.libpretixsync.db.Item; -import eu.pretix.libpretixsync.utils.HashUtils; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class BadgeLayoutSyncAdapter extends BaseDownloadSyncAdapter { - - public BadgeLayoutSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - } - - @Override - public void updateObject(BadgeLayout obj, JSONObject jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - 
obj.setIs_default(jsonobj.getBoolean("default")); - obj.setServer_id(jsonobj.getLong("id")); - obj.setJson_data(jsonobj.toString()); - - String remote_filename = jsonobj.optString("background"); - if (remote_filename != null && remote_filename.startsWith("http")) { - String hash = HashUtils.toSHA1(remote_filename.getBytes()); - String local_filename = "badgelayout_" + obj.getServer_id() + "_" + hash + ".pdf"; - if (obj.getBackground_filename() != null && !obj.getBackground_filename().equals(local_filename)) { - fileStorage.delete(obj.getBackground_filename()); - obj.setBackground_filename(null); - } - if (!fileStorage.contains(local_filename)) { - try { - PretixApi.ApiResponse file = api.downloadFile(remote_filename); - OutputStream os = fileStorage.writeStream(local_filename); - InputStream is = file.getResponse().body().byteStream(); - byte[] buffer = new byte[1444]; - int byteread; - while ((byteread = is.read(buffer)) != -1) { - os.write(buffer, 0, byteread); - } - is.close(); - os.close(); - obj.setBackground_filename(local_filename); - } catch (ApiException e) { - // TODO: What to do? - e.printStackTrace(); - } catch (IOException e) { - // TODO: What to do? 
- e.printStackTrace(); - fileStorage.delete(local_filename); - } - } else { - obj.setBackground_filename(local_filename); - } - } else { - if (obj.getBackground_filename() != null) { - fileStorage.delete(obj.getBackground_filename()); - obj.setBackground_filename(null); - } - } - } - - @Override - protected void prepareDelete(BadgeLayout obj) { - super.prepareDelete(obj); - if (obj.getBackground_filename() != null) { - fileStorage.delete(obj.getBackground_filename()); - } - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(BadgeLayout.class) - .where(BadgeLayout.EVENT_SLUG.eq(eventSlug)) - .and(BadgeLayout.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(BadgeLayout.SERVER_ID) - .where(BadgeLayout.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - String getResourceName() { - return "badgelayouts"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(BadgeLayout obj) { - return obj.getServer_id(); - } - - @Override - BadgeLayout newEmptyObject() { - return new BadgeLayout(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutSyncAdapter.kt new file mode 100644 index 00000000..f4cf186d --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BadgeLayoutSyncAdapter.kt @@ -0,0 +1,142 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.ApiException +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.sqldelight.BadgeLayout +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import 
eu.pretix.libpretixsync.utils.HashUtils +import org.json.JSONObject +import java.io.IOException + +class BadgeLayoutSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, +) : BaseDownloadSyncAdapter( + db = db, + api = api, + syncCycleId = syncCycleId, + eventSlug = eventSlug, + fileStorage = fileStorage, + feedback = feedback, +) { + override fun getResourceName(): String = "badgelayouts" + + override fun getId(obj: BadgeLayout): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: BadgeLayout): JSONObject = JSONObject(obj.json_data!!) + + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.badgeLayoutQueries.selectServerIdsByEventSlug(eventSlug).execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + val backgroundFilename = processBackground(jsonobj, null) + + db.badgeLayoutQueries.insert( + background_filename = backgroundFilename, + event_slug = eventSlug, + is_default = jsonobj.getBoolean("default"), + json_data = jsonobj.toString(), + server_id = jsonobj.getLong("id"), + ) + } + + override fun update(obj: BadgeLayout, jsonobj: JSONObject) { + val backgroundFilename = processBackground(jsonobj, obj.background_filename) + + db.badgeLayoutQueries.updateFromJson( + background_filename = backgroundFilename, + event_slug = eventSlug, + is_default = jsonobj.getBoolean("default"), + json_data = jsonobj.toString(), + id = obj.id, + ) + } + + private fun processBackground(jsonobj: JSONObject, oldFilename: String?): String? { + val remote_filename = jsonobj.optString("background") + var result: String? 
= null + + if (remote_filename != null && remote_filename.startsWith("http")) { + val hash = HashUtils.toSHA1(remote_filename.toByteArray()) + val local_filename = "badgelayout_" + jsonobj.getLong("id") + "_" + hash + ".pdf" + if (oldFilename != null && oldFilename != local_filename) { + fileStorage.delete(oldFilename) + result = null + } + if (!fileStorage.contains(local_filename)) { + try { + val file = api.downloadFile(remote_filename) + val os = fileStorage.writeStream(local_filename) + val `is` = file.response.body!!.byteStream() + val buffer = ByteArray(1444) + var byteread: Int + while ((`is`.read(buffer).also { byteread = it }) != -1) { + os.write(buffer, 0, byteread) + } + `is`.close() + os.close() + result = local_filename + } catch (e: ApiException) { + // TODO: What to do? + e.printStackTrace() + } catch (e: IOException) { + // TODO: What to do? + e.printStackTrace() + fileStorage.delete(local_filename) + } + } else { + result = local_filename + } + } else { + if (oldFilename != null) { + fileStorage.delete(oldFilename) + result = null + } + } + + return result + } + + override fun delete(key: Long) { + db.badgeLayoutQueries.deleteByServerId(key) + } + + override fun prepareDelete(obj: BadgeLayout) { + super.prepareDelete(obj) + if (obj.background_filename != null) { + fileStorage.delete(obj.background_filename) + } + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.badgeLayoutQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.badgeLayoutQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + ).executeAsList() + +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseConditionalSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseConditionalSyncAdapter.java deleted file mode 100644 index 1977b998..00000000 --- 
a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseConditionalSyncAdapter.java +++ /dev/null @@ -1,74 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.concurrent.ExecutionException; - -import eu.pretix.libpretixsync.api.ApiException; -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.api.ResourceNotModified; -import eu.pretix.libpretixsync.db.RemoteObject; -import eu.pretix.libpretixsync.db.ResourceSyncStatus; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; - -abstract public class BaseConditionalSyncAdapter extends BaseDownloadSyncAdapter { - - private PretixApi.ApiResponse firstResponse; - - public BaseConditionalSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - } - - @Override - protected JSONObject downloadPage(String url, boolean isFirstPage) throws ApiException, ResourceNotModified { - ResourceSyncStatus resourceSyncStatus = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(getResourceName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq(eventSlug)) - .limit(1) - .get().firstOrNull(); - if (resourceSyncStatus == null) { - resourceSyncStatus = new ResourceSyncStatus(); - } else { - if (!getMeta().equals(resourceSyncStatus.getMeta()) && !(getMeta().equals("") && resourceSyncStatus.getMeta() == null)) { - store.delete(resourceSyncStatus); - resourceSyncStatus = new ResourceSyncStatus(); - } - } - PretixApi.ApiResponse apiResponse = api.fetchResource(url, resourceSyncStatus.getLast_modified()); - if (isFirstPage) { - firstResponse = apiResponse; - } - return apiResponse.getData(); - } - - public String getMeta() { - return ""; - } - - @Override - public void download() throws JSONException, ApiException, 
ExecutionException, InterruptedException { - firstResponse = null; - super.download(); - if (firstResponse != null) { - ResourceSyncStatus resourceSyncStatus = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(getResourceName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq(eventSlug)) - .limit(1) - .get().firstOrNull(); - if (resourceSyncStatus == null) { - resourceSyncStatus = new ResourceSyncStatus(); - resourceSyncStatus.setResource(getResourceName()); - resourceSyncStatus.setEvent_slug(eventSlug); - resourceSyncStatus.setMeta(getMeta()); - } - if (firstResponse.getResponse().header("Last-Modified") != null) { - resourceSyncStatus.setLast_modified(firstResponse.getResponse().header("Last-Modified")); - resourceSyncStatus.setMeta(getMeta()); - store.upsert(resourceSyncStatus); - } - firstResponse = null; - } - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseConditionalSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseConditionalSyncAdapter.kt new file mode 100644 index 00000000..a00ae2de --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseConditionalSyncAdapter.kt @@ -0,0 +1,100 @@ +package eu.pretix.libpretixsync.sync + +import eu.pretix.libpretixsync.api.ApiException +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.api.ResourceNotModified +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.json.JSONException +import org.json.JSONObject +import java.util.Date +import java.util.concurrent.ExecutionException + +abstract class BaseConditionalSyncAdapter( + db: SyncDatabase, + api: PretixApi, + syncCycleId: String, + eventSlug: String, + fileStorage: FileStorage, + feedback: ProgressFeedback?, +) : BaseDownloadSyncAdapter( + db = db, + api = api, + syncCycleId = syncCycleId, + eventSlug = eventSlug, + fileStorage = fileStorage, + feedback = 
feedback, +) { + private var firstResponse: PretixApi.ApiResponse? = null + + @Throws(ApiException::class, ResourceNotModified::class) + override fun downloadPage(url: String, isFirstPage: Boolean): JSONObject? { + val resourceSyncStatus = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = getResourceName(), + event_slug = eventSlug, + ).executeAsOneOrNull() + + val lastModified = if (resourceSyncStatus == null) { + null + } else { + if (getMeta() != resourceSyncStatus.meta && !(getMeta() == "" && resourceSyncStatus.meta == null)) { + db.resourceSyncStatusQueries.deleteById(resourceSyncStatus.id) + Date().toString() + } else { + resourceSyncStatus.last_modified + } + } + val apiResponse = api.fetchResource(url, lastModified) + + if (isFirstPage) { + firstResponse = apiResponse + } + return apiResponse.data + } + + protected open fun getMeta(): String { + return "" + } + + @Throws( + JSONException::class, + ApiException::class, + ExecutionException::class, + InterruptedException::class + ) + override fun download() { + firstResponse = null + super.download() + + val currentResponse = firstResponse + if (currentResponse != null) { + val resourceSyncStatus = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = getResourceName(), + event_slug = eventSlug, + ).executeAsOneOrNull() + + val lastModified = if (currentResponse.response.header("Last-Modified") != null) { + currentResponse.response.header("Last-Modified") + } else { + null + } + + if (lastModified != null && resourceSyncStatus == null) { + db.resourceSyncStatusQueries.insert( + event_slug = eventSlug, + last_modified = lastModified, + resource = getResourceName(), + meta = getMeta(), + status = null, + ) + } else if (lastModified != null && resourceSyncStatus != null) { + db.resourceSyncStatusQueries.updateLastModifiedAndMeta( + last_modified = lastModified, + meta = getMeta(), + id = resourceSyncStatus.id, + ) + } + firstResponse = null + } + } +} diff --git 
a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseDownloadSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseDownloadSyncAdapter.java deleted file mode 100644 index 7480c60e..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseDownloadSyncAdapter.java +++ /dev/null @@ -1,273 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import eu.pretix.libpretixsync.api.ApiException; -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.api.ResourceNotModified; -import eu.pretix.libpretixsync.db.Migrations; -import eu.pretix.libpretixsync.db.RemoteObject; -import eu.pretix.libpretixsync.utils.JSONUtils; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; -import java8.util.concurrent.CompletableFuture; - -public abstract class BaseDownloadSyncAdapter implements DownloadSyncAdapter, BatchedQueryIterator.BatchedQueryCall { - - protected BlockingEntityStore store; - protected PretixApi api; - protected String syncCycleId; - protected String eventSlug; - protected FileStorage fileStorage; - protected Set knownIDs; - protected Set seenIDs; - protected int sizeBefore; - protected ExecutorService threadPool = Executors.newCachedThreadPool(); - protected SyncManager.ProgressFeedback feedback; - protected int total; - protected int inserted; - protected int totalOnline; - protected SyncManager.CanceledState canceledState; - - public 
BaseDownloadSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - this.store = store; - this.api = api; - this.syncCycleId = syncCycleId; - this.eventSlug = eventSlug; - this.fileStorage = fileStorage; - this.feedback = feedback; - } - - @Override - public void setCancelState(SyncManager.CanceledState state) { - canceledState = state; - } - - @Override - public void download() throws JSONException, ApiException, ExecutionException, InterruptedException { - if (feedback != null) { - feedback.postFeedback("Downloading " + getResourceName() + " ["+ eventSlug + "] …"); - } - try { - total = 0; - inserted = 0; - knownIDs = getKnownIDs(); - sizeBefore = getKnownCount(); - seenIDs = new HashSet<>(); - downloadData(); - - if (deleteUnseen()) { - if (knownIDs == null) { - throw new RuntimeException("getKnownIDsIterator() must not return null if deleteUnseen() returns true."); - } - for (Map.Entry obj : getKnownObjects(knownIDs).entrySet()) { - prepareDelete(obj.getValue()); - store.delete(obj.getValue()); - } - } - } catch (ResourceNotModified e) { - // Do nothing - } - } - - protected Iterator getKnownObjectsIterator(Set ids) { - return new BatchedQueryIterator(ids.iterator(), this); - } - - abstract CloseableIterator getKnownIDsIterator(); - - protected int getKnownCount() { - if (knownIDs == null) { - throw new RuntimeException("getKnownIDsIterator() must be overridden if deleteUnseen() returns true."); - } - return knownIDs.size(); - } - - protected Set getKnownIDs() { - CloseableIterator it = getKnownIDsIterator(); - if (it == null) { - return null; - } - Set known = new HashSet<>(); - while (it.hasNext()) { - Tuple obj = it.next(); - known.add(obj.get(0)); - } - it.close(); - return known; - } - - protected Map getKnownObjects(Set ids) { - if (ids.isEmpty()) { - return new HashMap(); - } - Iterator it = getKnownObjectsIterator(ids); - Map known = new HashMap<>(); 
- while (it.hasNext()) { - try { - T obj = it.next(); - if (known.containsKey(getId(obj))) { - store.delete(known.get(getId(obj))); - } - known.put(getId(obj), obj); - } catch (BatchEmptyException e) { - // Ignore - } - } - return known; - } - - protected JSONObject preprocessObject(JSONObject obj) { - return obj; - } - - protected void processPage(final JSONArray data) { - int l = data.length(); - store.runInTransaction(new Callable() { - @Override - public Void call() throws Exception { - Set fetchIds = new HashSet<>(); - for (int i = 0; i < l; i++) { - JSONObject jsonobj = data.getJSONObject(i); - fetchIds.add(getId(jsonobj)); - } - - Map known = getKnownObjects(fetchIds); - List inserts = new ArrayList<>(); - - for (int i = 0; i < l; i++) { - JSONObject jsonobj = preprocessObject(data.getJSONObject(i)); - jsonobj.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION); - jsonobj.put("__libpretixsync_syncCycleId", syncCycleId); - K jsonid = getId(jsonobj); - T obj; - JSONObject old = null; - if (seenIDs.contains(jsonid)) { - continue; - } else if (known.containsKey(jsonid)) { - obj = known.get(jsonid); - old = obj.getJSON(); - } else { - obj = newEmptyObject(); - } - if (known.containsKey(jsonid)) { - known.remove(jsonid); - if (knownIDs != null) knownIDs.remove(jsonid); - if (!JSONUtils.similar(jsonobj, old)) { - updateObject(obj, jsonobj); - store.update(obj); - } - } else { - updateObject(obj, jsonobj); - if (autoPersist()) { - inserts.add(obj); - } - } - seenIDs.add(jsonid); - } - inserted += inserts.size(); - store.insert(inserts); - afterPage(); - return null; - } - }); - total += l; - if (feedback != null) { - feedback.postFeedback("Processed " + total + "/" + totalOnline + " " + getResourceName() + " (total in database: ~" + (sizeBefore + inserted) + ") " + " [" + eventSlug + "] …"); - } - } - - protected void afterPage() { - - } - - protected void prepareDelete(T obj) { - - } - - protected boolean autoPersist() { - return true; - } - - protected 
boolean deleteUnseen() { - return true; - } - - public abstract void updateObject(T obj, JSONObject jsonobj) throws JSONException; - - protected CompletableFuture asyncProcessPage(JSONArray data) { - CompletableFuture completableFuture = new CompletableFuture<>(); - - threadPool.submit(() -> { - try { - processPage(data); - } catch (Exception e) { - completableFuture.completeExceptionally(e); - } finally { - completableFuture.complete(true); - } - }); - - return completableFuture; - } - - protected String getUrl() { - return api.eventResourceUrl(eventSlug, getResourceName()); - } - - protected void downloadData() throws JSONException, ApiException, ResourceNotModified, ExecutionException, InterruptedException { - - String url = getUrl(); - boolean isFirstPage = true; - CompletableFuture future = null; - try { - while (true) { - if (canceledState != null && canceledState.isCanceled()) - throw new InterruptedException(); - JSONObject page = downloadPage(url, isFirstPage); - if (future != null) { - future.get(); - } - totalOnline = page.getInt("count"); - future = asyncProcessPage(page.getJSONArray("results")); - if (page.isNull("next")) { - break; - } - url = page.getString("next"); - isFirstPage = false; - } - } finally { - if (future != null) { - future.get(); - } - } - } - - protected JSONObject downloadPage(String url, boolean isFirstPage) throws ApiException, ResourceNotModified { - return api.fetchResource(url).getData(); - } - - abstract String getResourceName(); - - abstract K getId(JSONObject obj) throws JSONException; - - abstract K getId(T obj); - - abstract T newEmptyObject(); -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseDownloadSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseDownloadSyncAdapter.kt new file mode 100644 index 00000000..b9941aeb --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseDownloadSyncAdapter.kt @@ -0,0 +1,242 @@ +package 
eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import eu.pretix.libpretixsync.api.ApiException +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.api.ResourceNotModified +import eu.pretix.libpretixsync.db.Migrations +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.CanceledState +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import eu.pretix.libpretixsync.utils.JSONUtils +import java8.util.concurrent.CompletableFuture +import org.json.JSONArray +import org.json.JSONException +import org.json.JSONObject +import java.util.concurrent.ExecutionException +import java.util.concurrent.Executors + +abstract class BaseDownloadSyncAdapter( + protected var db: SyncDatabase, + protected var api: PretixApi, + protected var syncCycleId: String, + protected var eventSlug: String, + protected var fileStorage: FileStorage, + protected var feedback: ProgressFeedback?, +) : DownloadSyncAdapter, BatchedQueryIterator.BatchedQueryCall { + protected var knownIDs: MutableSet? = null + protected val seenIDs: MutableSet = mutableSetOf() + protected var sizeBefore = 0L + protected var threadPool = Executors.newCachedThreadPool() + protected var total = 0 + protected var inserted = 0 + protected var totalOnline = 0 + protected var canceledState: CanceledState? = null + + override fun setCancelState(state: CanceledState?) 
{ + canceledState = state + } + + @Throws( + JSONException::class, + ApiException::class, + ExecutionException::class, + InterruptedException::class + ) + override fun download() { + feedback?.postFeedback("Downloading " + getResourceName() + " [" + eventSlug + "] …") + + try { + total = 0 + inserted = 0 + knownIDs = queryKnownIDs() + sizeBefore = getKnownCount() + seenIDs.clear() + + downloadData() + + if (deleteUnseen()) { + val ids = knownIDs + ?: throw RuntimeException("knownIDs can't be null if deleteUnseen() returns true.") + + for ((key, value) in queryKnownObjects(ids)) { + prepareDelete(value) + delete(key) + } + } + } catch (e: ResourceNotModified) { + // Do nothing + } + } + + protected open fun getKnownCount(): Long = knownIDs.let { + if (it == null) { + throw RuntimeException("knownIDs can't be null if deleteUnseen() returns true.") + } + return it.size.toLong() + } + + protected open fun queryKnownObjects(ids: Set): MutableMap { + if (ids.isEmpty()) { + return mutableMapOf() + } + + val it = BatchedQueryIterator(ids.iterator(), this) + val known = mutableMapOf() + while (it.hasNext()) { + try { + val obj = it.next() + val key = getId(obj) + if (known.containsKey(key)) { + delete(key) + } + known[key] = obj + } catch (e: BatchEmptyException) { + // Ignore + } + } + return known + } + + protected open fun preprocessObject(obj: JSONObject): JSONObject { + return obj + } + + protected fun processPage(data: JSONArray) { + val l = data.length() + runInTransaction { + val fetchIds: MutableSet = HashSet() + for (i in 0 until l) { + val jsonobj = data.getJSONObject(i) + fetchIds.add(getId(jsonobj)) + } + val known: MutableMap = queryKnownObjects(fetchIds) + for (i in 0 until l) { + val jsonobj = preprocessObject(data.getJSONObject(i)) + jsonobj.put( + "__libpretixsync_dbversion", + Migrations.CURRENT_VERSION + ) + jsonobj.put("__libpretixsync_syncCycleId", syncCycleId) + val jsonid: K = getId(jsonobj) + var obj: T? + var old: JSONObject? 
= null + + if (seenIDs.contains(jsonid)) { + continue + } else if (known.containsKey(jsonid)) { + obj = known.getValue(jsonid) + old = getJSON(obj) + } else { + obj = null + } + if (obj != null) { + known.remove(jsonid) + knownIDs?.let { + it.remove(jsonid) + } + if (!JSONUtils.similar(jsonobj, old)) { + update(obj, jsonobj) + } + } else { + insert(jsonobj) + inserted += 1 + } + seenIDs.add(jsonid) + } + + afterPage() + null + } + total += l + + feedback?.postFeedback("Processed " + total + "/" + totalOnline + " " + getResourceName() + " (total in database: ~" + (sizeBefore + inserted) + ") " + " [" + eventSlug + "] …") + } + + protected open fun afterPage() {} + + protected open fun prepareDelete(obj: T) {} + + protected open fun deleteUnseen(): Boolean { + return true + } + + protected fun asyncProcessPage(data: JSONArray): CompletableFuture { + val completableFuture = CompletableFuture() + threadPool.submit { + try { + processPage(data) + } catch (e: Exception) { + completableFuture.completeExceptionally(e) + } finally { + completableFuture.complete(true) + } + } + return completableFuture + } + + protected open fun getUrl(): String { + return api.eventResourceUrl(eventSlug, getResourceName()) + } + + @Throws( + JSONException::class, + ApiException::class, + ResourceNotModified::class, + ExecutionException::class, + InterruptedException::class + ) + protected open fun downloadData() { + var url = getUrl() + var isFirstPage = true + var future: CompletableFuture? 
= null + try { + while (true) { + val isCanceled = canceledState.let { + it != null && it.isCanceled + } + if (isCanceled) { + throw InterruptedException() + } + + val page = downloadPage(url, isFirstPage) ?: throw ApiException("page is null") + + future?.get() + totalOnline = page.getInt("count") + future = asyncProcessPage(page.getJSONArray("results")) + if (page.isNull("next")) { + break + } + url = page.getString("next") + isFirstPage = false + } + } finally { + future?.get() + } + } + + @Throws(ApiException::class, ResourceNotModified::class) + protected open fun downloadPage(url: String, isFirstPage: Boolean): JSONObject? { + return api.fetchResource(url).data + } + + abstract fun getResourceName(): String + + abstract fun getId(obj: T): K + + @Throws(JSONException::class) + abstract fun getId(obj: JSONObject): K + + @Throws(JSONException::class) + abstract fun getJSON(obj: T): JSONObject + + abstract fun queryKnownIDs(): MutableSet? + + abstract fun insert(jsonobj: JSONObject) + + abstract fun update(obj: T, jsonobj: JSONObject) + + abstract fun delete(key: K) + + abstract fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseSingleObjectSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseSingleObjectSyncAdapter.java deleted file mode 100644 index 0e768f48..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseSingleObjectSyncAdapter.java +++ /dev/null @@ -1,104 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.concurrent.Callable; - -import eu.pretix.libpretixsync.api.ApiException; -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.api.ResourceNotModified; -import eu.pretix.libpretixsync.db.Migrations; -import eu.pretix.libpretixsync.db.RemoteObject; -import eu.pretix.libpretixsync.utils.JSONUtils; -import 
io.requery.BlockingEntityStore; -import io.requery.Persistable; - -public abstract class BaseSingleObjectSyncAdapter implements DownloadSyncAdapter { - - protected BlockingEntityStore store; - protected PretixApi api; - protected String eventSlug; - protected String key; - protected String syncCycleId; - protected SyncManager.ProgressFeedback feedback; - protected SyncManager.CanceledState canceledState; - - public BaseSingleObjectSyncAdapter(BlockingEntityStore store, String eventSlug, String key, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - this.store = store; - this.api = api; - this.eventSlug = eventSlug; - this.key = key; - this.syncCycleId = syncCycleId; - this.feedback = feedback; - } - - @Override - public void setCancelState(SyncManager.CanceledState state) { - canceledState = state; - } - - @Override - public void download() throws JSONException, ApiException { - if (feedback != null) { - feedback.postFeedback("Downloading " + getResourceName() + " ["+ eventSlug + "] …"); - } - try { - JSONObject data = downloadRawData(); - processData(data); - } catch (ResourceNotModified e) { - // Do nothing - } - } - - abstract T getKnownObject(); - - protected void processData(final JSONObject data) { - try { - data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION); - data.put("__libpretixsync_syncCycleId", syncCycleId); - } catch (JSONException e) { - e.printStackTrace(); - } - - store.runInTransaction(new Callable() { - @Override - public Void call() throws Exception { - T known = getKnownObject(); - T obj; - JSONObject old = null; - if (known != null) { - obj = known; - old = obj.getJSON(); - if (!JSONUtils.similar(data, old)) { - updateObject(obj, data); - store.update(obj); - } - } else { - obj = newEmptyObject(); - updateObject(obj, data); - store.insert(obj); - } - return null; - } - }); - } - - public abstract void updateObject(T obj, JSONObject jsonobj) throws JSONException; - - protected String getUrl() { - return 
api.eventResourceUrl(eventSlug, getResourceName() + "/" + key); - } - - protected JSONObject downloadRawData() throws ApiException, ResourceNotModified { - return downloadPage(getUrl()); - } - - protected JSONObject downloadPage(String url) throws ApiException, ResourceNotModified { - return api.fetchResource(url).getData(); - } - - abstract String getResourceName(); - - abstract T newEmptyObject(); -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseSingleObjectSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseSingleObjectSyncAdapter.kt new file mode 100644 index 00000000..e927be10 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BaseSingleObjectSyncAdapter.kt @@ -0,0 +1,88 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import eu.pretix.libpretixsync.api.ApiException +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.api.ResourceNotModified +import eu.pretix.libpretixsync.db.Migrations +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.CanceledState +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import eu.pretix.libpretixsync.utils.JSONUtils +import org.json.JSONException +import org.json.JSONObject + +abstract class BaseSingleObjectSyncAdapter( + protected var db: SyncDatabase, + protected var eventSlug: String, + protected var key: String, + protected var api: PretixApi, + protected var syncCycleId: String, + protected var feedback: ProgressFeedback? = null, +) : DownloadSyncAdapter { + + private var canceledState: CanceledState? 
= null + + override fun download() { + if (feedback != null) { + feedback!!.postFeedback("Downloading " + getResourceName() + " [" + eventSlug + "] …") + } + try { + val data = downloadRawData() + processData(data) + } catch (e: ResourceNotModified) { + // Do nothing + } + } + + override fun setCancelState(state: SyncManager.CanceledState?) { + canceledState = state + } + + abstract fun getKnownObject(): T? + + protected fun processData(data: JSONObject) { + try { + data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION) + data.put("__libpretixsync_syncCycleId", syncCycleId) + } catch (e: JSONException) { + e.printStackTrace() + } + + runInTransaction { + val known = getKnownObject() + if (known != null) { + val old = getJSON(known) + if (!JSONUtils.similar(data, old)) { + update(known, data) + } + } else { + insert(data) + } + } + } + + protected open fun getUrl(): String { + return api.eventResourceUrl(eventSlug, getResourceName() + "/" + key) + } + + @Throws(ApiException::class, ResourceNotModified::class) + protected fun downloadRawData(): JSONObject { + return downloadPage(getUrl()) + } + + @Throws(ApiException::class, ResourceNotModified::class) + protected fun downloadPage(url: String): JSONObject { + return api.fetchResource(url).data!! 
+ } + + abstract fun getResourceName(): String + + abstract fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) + + abstract fun insert(jsonobj: JSONObject) + + abstract fun update(obj: T, jsonobj: JSONObject) + + abstract fun getJSON(obj: T): JSONObject +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BatchedQueryIterator.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BatchedQueryIterator.java deleted file mode 100644 index 7bdf4051..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BatchedQueryIterator.java +++ /dev/null @@ -1,58 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import io.requery.util.CloseableIterator; - -public class BatchedQueryIterator implements Iterator { - /** - * An iterator that performs a query with an arbitrary number of parameters of the same type, - * such as a WHERE a IN (…) query with an unknown number of values. We can't just use IN (…) - * naively, since SQLite has a limit on 999 variables per query (SQLITE_MAX_VARIABLE_NUMBER), - * so we need to do it in batches. 
- */ - - private static final int BATCH_SIZE = 500; - private Iterator allParameters; - private List buffer = new ArrayList<>(); - private BatchedQueryCall callBack; - - public interface BatchedQueryCall { - public CloseableIterator runBatch(List parameterBatch); - } - - public BatchedQueryIterator(Iterator allParameters, BatchedQueryCall callBack) { - this.allParameters = allParameters; - this.callBack = callBack; - } - - @Override - public boolean hasNext() { - return buffer.size() > 0 || allParameters.hasNext(); - } - - @Override - public T next() { - if (buffer.size() == 0) { - List batch = new ArrayList<>(); - for (int i = 0; i < BATCH_SIZE; i++) { - if (allParameters.hasNext()) { - batch.add(allParameters.next()); - } else { - break; - } - } - CloseableIterator batchResult = callBack.runBatch(batch); - while (batchResult.hasNext()) { - buffer.add(batchResult.next()); - } - batchResult.close(); - if (buffer.size() == 0) { - throw new BatchEmptyException(); - } - } - return buffer.remove(0); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BatchedQueryIterator.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BatchedQueryIterator.kt new file mode 100644 index 00000000..8b1caa61 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BatchedQueryIterator.kt @@ -0,0 +1,43 @@ +package eu.pretix.libpretixsync.sync + +class BatchedQueryIterator( + private var allParameters: Iterator, + private var callBack: BatchedQueryCall, +) : Iterator { + /** + * An iterator that performs a query with an arbitrary number of parameters of the same type, + * such as a WHERE a IN (…) query with an unknown number of values. We can't just use IN (…) + * naively, since SQLite has a limit on 999 variables per query (SQLITE_MAX_VARIABLE_NUMBER), + * so we need to do it in batches. 
+ */ + private val BATCH_SIZE = 500 + private val buffer: MutableList = ArrayList() + + interface BatchedQueryCall { + fun runBatch(parameterBatch: List): List + } + + override fun hasNext(): Boolean { + return buffer.size > 0 || allParameters.hasNext() + } + + override fun next(): T { + if (buffer.size == 0) { + val batch: MutableList = ArrayList() + for (i in 0 until BATCH_SIZE) { + if (allParameters.hasNext()) { + batch.add(allParameters.next()) + } else { + break + } + } + val batchResult = callBack.runBatch(batch) + buffer.addAll(batchResult) + + if (buffer.size == 0) { + throw BatchEmptyException() + } + } + return buffer.removeAt(0) + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BlockedTicketSecretSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BlockedTicketSecretSyncAdapter.java deleted file mode 100644 index 74f96dce..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BlockedTicketSecretSyncAdapter.java +++ /dev/null @@ -1,194 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.util.List; -import java.util.concurrent.ExecutionException; - -import eu.pretix.libpretixsync.api.ApiException; -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.api.ResourceNotModified; -import eu.pretix.libpretixsync.db.BlockedTicketSecret; -import eu.pretix.libpretixsync.db.ResourceSyncStatus; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class BlockedTicketSecretSyncAdapter extends BaseDownloadSyncAdapter { - private String firstResponseTimestamp; - private ResourceSyncStatus rlm; - - public BlockedTicketSecretSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, 
PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - } - - @Override - protected boolean autoPersist() { - return false; - } - - @Override - public void download() throws JSONException, ApiException, ExecutionException, InterruptedException { - boolean completed = false; - try { - super.download(); - completed = true; - } finally { - ResourceSyncStatus resourceSyncStatus = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(getResourceName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq(eventSlug)) - .limit(1) - .get().firstOrNull(); - - // We need to cache the response timestamp of the *first* page in the result set to make - // sure we don't miss anything between this and the next run. - // - // If the download failed, completed will be false. In case this was a full fetch - // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be - // able to continue properly. 
- if (firstResponseTimestamp != null) { - if (resourceSyncStatus == null) { - resourceSyncStatus = new ResourceSyncStatus(); - resourceSyncStatus.setResource(getResourceName()); - resourceSyncStatus.setEvent_slug(eventSlug); - if (completed) { - resourceSyncStatus.setStatus("complete"); - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } - } else { - if (completed) { - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } - } - } else if (completed && resourceSyncStatus != null) { - resourceSyncStatus.setStatus("complete"); - store.update(resourceSyncStatus); - } - firstResponseTimestamp = null; - } - - // We clean up unblocked records after the sync - store.delete(BlockedTicketSecret.class).where(BlockedTicketSecret.BLOCKED.eq(false)); - } - - protected boolean deleteUnseen() { - return rlm == null; - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(BlockedTicketSecret.SERVER_ID) - .where(BlockedTicketSecret.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - public void updateObject(BlockedTicketSecret obj, JSONObject jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - obj.setServer_id(jsonobj.getLong("id")); - obj.setUpdated(jsonobj.getString("updated")); - obj.setBlocked(jsonobj.getBoolean("blocked")); - obj.setSecret(jsonobj.getString("secret")); - obj.setJson_data(jsonobj.toString()); - - if (obj.getId() == null && obj.isBlocked()) { - // If not blocked and not yet in our database, we don't need to save it, as we only care - // about blocked entries. 
- store.insert(obj); - } else { - - } - } - - @Override - protected String getUrl() { - return api.eventResourceUrl(eventSlug, getResourceName()); - } - - @Override - String getResourceName() { - return "blockedsecrets"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(BlockedTicketSecret obj) { - return obj.getServer_id(); - } - - @Override - BlockedTicketSecret newEmptyObject() { - return new BlockedTicketSecret(); - } - - @Override - public CloseableIterator runBatch(List parameterBatch) { - return store.select(BlockedTicketSecret.class) - .where(BlockedTicketSecret.SERVER_ID.in(parameterBatch)) - .and(BlockedTicketSecret.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - protected JSONObject downloadPage(String url, boolean isFirstPage) throws ApiException, ResourceNotModified { - if (isFirstPage) { - rlm = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(getResourceName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq(eventSlug)) - .limit(1) - .get().firstOrNull(); - } - - if (rlm != null) { - // This resource has been fetched before. - // Diff to last time - - // Ordering is crucial here: Only because the server returns the objects in the - // order of modification we can be sure that we don't miss orders created in between our - // paginated requests. If an object were to be modified between our fetch of page 1 - // and 2 that originally wasn't part of the result set, we won't see it (as it will - // be inserted on page 1), but we'll see it the next time, and we will see some - // duplicates on page 2, but we don't care. The important part is that nothing gets - // lost "between the pages". If an order of page 2 gets modified and moves to page - // one while we fetch page 2, again, we won't see it and we'll see some duplicates, - // but the next sync will fix it since we always fetch our diff compared to the time - // of the first page. 
- try { - if (!url.contains("updated_since")) { - if (url.contains("?")) { - url += "&"; - } else { - url += "?"; - } - url += "ordering=-updated&updated_since=" + URLEncoder.encode(rlm.getLast_modified(), "UTF-8"); - } - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - } - - PretixApi.ApiResponse apiResponse = api.fetchResource(url); - if (isFirstPage) { - try { - JSONArray results = apiResponse.getData().getJSONArray("results"); - if (results.length() > 0) { - firstResponseTimestamp = results.getJSONObject(0).getString("updated"); - } - } catch (JSONException | NullPointerException e) { - e.printStackTrace(); - } - } - return apiResponse.getData(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BlockedTicketSecretSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BlockedTicketSecretSyncAdapter.kt new file mode 100644 index 00000000..c1698ee3 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/BlockedTicketSecretSyncAdapter.kt @@ -0,0 +1,221 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.ApiException +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.api.ResourceNotModified +import eu.pretix.libpretixsync.sqldelight.BlockedTicketSecret +import eu.pretix.libpretixsync.sqldelight.ResourceSyncStatus +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.json.JSONException +import org.json.JSONObject +import java.io.UnsupportedEncodingException +import java.net.URLEncoder +import java.util.concurrent.ExecutionException + +class BlockedTicketSecretSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, +) : BaseDownloadSyncAdapter( + db = db, + 
api = api, + syncCycleId = syncCycleId, + eventSlug = eventSlug, + fileStorage = fileStorage, + feedback = feedback, +) { + + private var firstResponseTimestamp: String? = null + private var rlm: ResourceSyncStatus? = null + + override fun getResourceName(): String = "blockedsecrets" + + override fun getId(obj: BlockedTicketSecret): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: BlockedTicketSecret): JSONObject = JSONObject(obj.json_data!!) + + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.blockedTicketSecretQueries.selectServerIdsByEventSlug(eventSlug).execute { cursor -> + while (cursor.next().value) { + val id = + cursor.getLong(0) ?: throw RuntimeException("server_id column not available") + res.add(id) + } + + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + val blocked = jsonobj.getBoolean("blocked") + // If not blocked and not yet in our database, we don't need to save it, as we only care + // about blocked entries. + if (!blocked) { + return + } + + db.blockedTicketSecretQueries.insert( + blocked = blocked, + event_slug = eventSlug, + json_data = jsonobj.toString(), + secret = jsonobj.getString("secret"), + server_id = jsonobj.getLong("id"), + updated = jsonobj.getString("updated"), + ) + } + + override fun update(obj: BlockedTicketSecret, jsonobj: JSONObject) { + // TODO: Test new behaviour. 
Original version had no update case + db.blockedTicketSecretQueries.updateFromJson( + blocked = jsonobj.getBoolean("blocked"), + event_slug = eventSlug, + json_data = jsonobj.toString(), + secret = jsonobj.getString("secret"), + updated = jsonobj.getString("updated"), + id = obj.id, + ) + } + + override fun delete(key: Long) { + db.blockedTicketSecretQueries.deleteByServerId(key) + } + + override fun deleteUnseen(): Boolean { + return rlm == null + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.blockedTicketSecretQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.blockedTicketSecretQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + ).executeAsList() + + @Throws( + JSONException::class, + ApiException::class, + ExecutionException::class, + InterruptedException::class + ) + override fun download() { + var completed = false + try { + super.download() + completed = true + } finally { + val resourceSyncStatus = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = getResourceName(), + event_slug = eventSlug, + ).executeAsOneOrNull() + + // We need to cache the response timestamp of the *first* page in the result set to make + // sure we don't miss anything between this and the next run. + // + // If the download failed, completed will be false. In case this was a full fetch + // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be + // able to continue properly. 
+ if (firstResponseTimestamp != null) { + if (resourceSyncStatus == null) { + if (completed) { + db.resourceSyncStatusQueries.insert( + event_slug = eventSlug, + last_modified = firstResponseTimestamp, + meta = null, + resource = getResourceName(), + status = "complete", + ) + } + } else { + if (completed) { + db.resourceSyncStatusQueries.updateLastModified( + last_modified = firstResponseTimestamp, + id = resourceSyncStatus.id, + ) + } + } + } else if (completed && resourceSyncStatus != null) { + db.resourceSyncStatusQueries.updateStatus( + status = "complete", + id = resourceSyncStatus.id + ) + } + firstResponseTimestamp = null + } + + // We clean up unblocked records after the sync + db.blockedTicketSecretQueries.deleteNotBlocked() + } + + @Throws(ApiException::class, ResourceNotModified::class) + override fun downloadPage(url: String, isFirstPage: Boolean): JSONObject? { + if (isFirstPage) { + rlm = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = getResourceName(), + event_slug = eventSlug, + ).executeAsOneOrNull() + } + + var resUrl = url + rlm?.let { + // This resource has been fetched before. + // Diff to last time + + // Ordering is crucial here: Only because the server returns the objects in the + // order of modification we can be sure that we don't miss orders created in between our + // paginated requests. If an object were to be modified between our fetch of page 1 + // and 2 that originally wasn't part of the result set, we won't see it (as it will + // be inserted on page 1), but we'll see it the next time, and we will see some + // duplicates on page 2, but we don't care. The important part is that nothing gets + // lost "between the pages". If an order of page 2 gets modified and moves to page + // one while we fetch page 2, again, we won't see it and we'll see some duplicates, + // but the next sync will fix it since we always fetch our diff compared to the time + // of the first page. 
+ + try { + if (!resUrl.contains("updated_since")) { + resUrl += if (resUrl.contains("?")) { + "&" + } else { + "?" + } + resUrl += "ordering=-updated&updated_since=" + URLEncoder.encode( + it.last_modified, + "UTF-8" + ) + } + } catch (e: UnsupportedEncodingException) { + e.printStackTrace() + } + } + + val apiResponse = api.fetchResource(resUrl) + if (isFirstPage) { + try { + val results = apiResponse.data!!.getJSONArray("results") + if (results.length() > 0) { + firstResponseTimestamp = results.getJSONObject(0).getString("updated") + } + } catch (e: JSONException) { + e.printStackTrace() + } catch (e: NullPointerException) { + e.printStackTrace() + } + } + return apiResponse.data + } + +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CashierSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CashierSyncAdapter.java deleted file mode 100644 index 4d2e24cc..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CashierSyncAdapter.java +++ /dev/null @@ -1,68 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.List; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.Cashier; -import eu.pretix.libpretixsync.db.ItemCategory; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class CashierSyncAdapter extends BaseConditionalSyncAdapter { - - public CashierSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, "__all__", api, syncCycleId, feedback); - } - - @Override - public void updateObject(Cashier obj, JSONObject jsonobj) throws JSONException { - obj.setServer_id(jsonobj.getLong("id")); - obj.setName(jsonobj.getString("name")); - obj.setUserid(jsonobj.getString("userid")); - 
obj.setPin(jsonobj.isNull("pin") ? "" : jsonobj.getString("pin")); - obj.setJson_data(jsonobj.toString()); - obj.setActive(jsonobj.getBoolean("active")); - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(Cashier.class) - .where(Cashier.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(Cashier.SERVER_ID) - .get().iterator(); - } - - @Override - String getResourceName() { - return "cashiers"; - } - - protected String getUrl() { - return api.organizerResourceUrl("pos/" + getResourceName()); - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(Cashier obj) { - return obj.getServer_id(); - } - - @Override - Cashier newEmptyObject() { - return new Cashier(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CashierSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CashierSyncAdapter.kt new file mode 100644 index 00000000..353b3e02 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CashierSyncAdapter.kt @@ -0,0 +1,83 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.sqldelight.Cashier +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import org.json.JSONObject + +class CashierSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + api: PretixApi, + syncCycleId: String, + feedback: SyncManager.ProgressFeedback?, +) : BaseConditionalSyncAdapter( + db = db, + fileStorage = fileStorage, + eventSlug = "__all__", + api = api, + syncCycleId = syncCycleId, + feedback = feedback, +) { + + override fun getResourceName(): String = "cashiers" + + override fun getUrl(): String = api.organizerResourceUrl("pos/" + getResourceName()) + + override fun 
getId(obj: Cashier): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: Cashier): JSONObject = JSONObject(obj.json_data) + + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.cashierQueries.selectServerIds().execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + db.cashierQueries.insert( + active = jsonobj.getBoolean("active"), + json_data = jsonobj.toString(), + name = jsonobj.getString("name"), + pin = if (jsonobj.isNull("pin")) "" else jsonobj.getString("pin"), + server_id = jsonobj.getLong("id"), + userid = jsonobj.getString("userid"), + ) + } + + override fun update(obj: Cashier, jsonobj: JSONObject) { + db.cashierQueries.updateFromJson( + active = jsonobj.getBoolean("active"), + json_data = jsonobj.toString(), + name = jsonobj.getString("name"), + pin = if (jsonobj.isNull("pin")) "" else jsonobj.getString("pin"), + userid = jsonobj.getString("userid"), + id = obj.id, + ) + } + + override fun delete(key: Long) { + db.cashierQueries.deleteByServerId(key) + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.cashierQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.cashierQueries.selectByServerIdList(parameterBatch).executeAsList() + +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CheckInListSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CheckInListSyncAdapter.java deleted file mode 100644 index efa1fc45..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CheckInListSyncAdapter.java +++ /dev/null @@ -1,133 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONArray; -import org.json.JSONException; -import 
org.json.JSONObject; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.CheckInList; -import eu.pretix.libpretixsync.db.Item; -import eu.pretix.libpretixsync.db.Migrations; -import eu.pretix.libpretixsync.utils.JSONUtils; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class CheckInListSyncAdapter extends BaseConditionalSyncAdapter { - private Long subeventId; - - public CheckInListSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback, Long subeventId) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - this.subeventId = subeventId; - } - - protected String getUrl() { - String url = api.eventResourceUrl(eventSlug, getResourceName()); - url += "?exclude=checkin_count&exclude=position_count"; - if (this.subeventId != null && this.subeventId > 0L) { - url += "&subevent_match=" + this.subeventId; - } - return url; - } - - @Override - public void updateObject(CheckInList obj, JSONObject jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - obj.setServer_id(jsonobj.getLong("id")); - obj.setSubevent_id(jsonobj.optLong("subevent")); - obj.setName(jsonobj.optString("name", "")); - obj.setInclude_pending(jsonobj.optBoolean("include_pending")); - obj.setAll_items(jsonobj.optBoolean("all_products")); - obj.setJson_data(jsonobj.toString()); - JSONArray itemsarr = jsonobj.getJSONArray("limit_products"); - List itemids = new ArrayList<>(); - for (int i = 0; i < itemsarr.length(); i++) { - itemids.add(itemsarr.getLong(i)); - } - if (!itemids.isEmpty()) { - List items = store.select(Item.class).where( - Item.SERVER_ID.in(itemids) - ).get().toList(); - for (Item item : items) { - if (!obj.getItems().contains(item)) { - 
obj.getItems().add(item); - } - } - obj.getItems().retainAll(items); - } else { - obj.getItems().clear(); - } - } - - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(CheckInList.class) - .where(CheckInList.EVENT_SLUG.eq(eventSlug)) - .and(CheckInList.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(CheckInList.SERVER_ID) - .where(CheckInList.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - String getResourceName() { - return "checkinlists"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - public String getMeta() { - if (this.subeventId != null && this.subeventId > 0L) { - return "subevent=" + this.subeventId; - } else { - return super.getMeta(); - } - } - - @Override - Long getId(CheckInList obj) { - return obj.getServer_id(); - } - - @Override - CheckInList newEmptyObject() { - return new CheckInList(); - } - - public void standaloneRefreshFromJSON(JSONObject data) throws JSONException { - CheckInList obj = store.select(CheckInList.class) - .where(CheckInList.SERVER_ID.eq(data.getLong("id"))) - .get().firstOr(newEmptyObject()); - JSONObject old = null; - if (obj.getId() != null) { - old = obj.getJSON(); - } - // Store object - data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION); - data.put("__libpretixsync_syncCycleId", syncCycleId); - if (old == null) { - updateObject(obj, data); - store.insert(obj); - } else { - if (!JSONUtils.similar(data, old)) { - updateObject(obj, data); - store.update(obj); - } - } - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CheckInListSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CheckInListSyncAdapter.kt new file mode 100644 index 00000000..195f2906 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/CheckInListSyncAdapter.kt @@ -0,0 +1,172 @@ 
+package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.db.Migrations +import eu.pretix.libpretixsync.sqldelight.CheckInList +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import eu.pretix.libpretixsync.utils.JSONUtils +import org.json.JSONException +import org.json.JSONObject + +class CheckInListSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, + private val subeventId: Long?, +) : BaseConditionalSyncAdapter( + db = db, + fileStorage = fileStorage, + eventSlug = eventSlug, + api = api, + syncCycleId = syncCycleId, + feedback = feedback, +) { + override fun getResourceName(): String = "checkinlists" + + override fun getUrl(): String { + var url = api.eventResourceUrl(eventSlug, getResourceName()) + url += "?exclude=checkin_count&exclude=position_count" + if (this.subeventId != null && this.subeventId > 0L) { + url += "&subevent_match=" + this.subeventId + } + return url + } + + public override fun getMeta(): String { + return if (this.subeventId != null && this.subeventId > 0L) { + "subevent=" + this.subeventId + } else { + super.getMeta() + } + } + + override fun getId(obj: CheckInList): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: CheckInList): JSONObject = JSONObject(obj.json_data!!) 
+ + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.checkInListQueries.selectServerIdsByEventSlug(eventSlug).execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + val listId = db.checkInListQueries.transactionWithResult { + db.checkInListQueries.insert( + all_items = jsonobj.optBoolean("all_products"), + event_slug = eventSlug, + include_pending = jsonobj.optBoolean("include_pending"), + json_data = jsonobj.toString(), + name = jsonobj.optString("name", ""), + server_id = jsonobj.getLong("id"), + subevent_id = jsonobj.optLong("subevent"), + ) + + db.compatQueries.getLastInsertedCheckInListId().executeAsOne() + } + + upsertItemRelations(listId, emptySet(), jsonobj) + } + + override fun update(obj: CheckInList, jsonobj: JSONObject) { + val existingRelations = db.checkInListQueries.selectRelationsForList(obj.id) + .executeAsList() + .map { + // Not-null assertion needed for SQLite + it.ItemId!! 
+ } + .toSet() + + db.checkInListQueries.updateFromJson( + all_items = jsonobj.optBoolean("all_products"), + event_slug = eventSlug, + include_pending = jsonobj.optBoolean("include_pending"), + json_data = jsonobj.toString(), + name = jsonobj.optString("name", ""), + subevent_id = jsonobj.optLong("subevent"), + id = obj.id, + ) + + upsertItemRelations(obj.id, existingRelations, jsonobj) + } + + private fun upsertItemRelations(listId: Long, existingIds: Set, jsonobj: JSONObject) { + val itemsarr = jsonobj.getJSONArray("limit_products") + val itemids = ArrayList(itemsarr.length()) + for (i in 0 until itemsarr.length()) { + itemids.add(itemsarr.getLong(i)) + } + val newIds = if (itemids.isNotEmpty()) { + db.itemQueries.selectByServerIdListAndEventSlug( + server_id = itemids, + event_slug = eventSlug, + ).executeAsList().map { it.id }.toSet() + } else { + emptySet() + } + + for (newId in newIds - existingIds) { + db.checkInListQueries.insertItemRelation( + item_id = newId, + checkin_list_id = listId, + ) + } + for (oldId in existingIds - newIds) { + db.checkInListQueries.deleteItemRelation( + item_id = oldId, + checkin_list_id = listId, + ) + } + } + + override fun delete(key: Long) { + val list = db.checkInListQueries.selectByServerId(key).executeAsOne() + db.checkInListQueries.deleteItemRelationsForList(list.id) + db.checkInListQueries.deleteByServerId(key) + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.checkInListQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.checkInListQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + ).executeAsList() + + @Throws(JSONException::class) + fun standaloneRefreshFromJSON(data: JSONObject) { + val known = db.checkInListQueries.selectByServerId(data.getLong("id")).executeAsOneOrNull() + + // Store object + data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION) + 
data.put("__libpretixsync_syncCycleId", syncCycleId) + if (known == null) { + insert(data) + } else { + val old = JSONObject(known.json_data!!) + if (!JSONUtils.similar(data, old)) { + update(known, data) + } + } + } + +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/EventSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/EventSyncAdapter.java deleted file mode 100644 index c3890385..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/EventSyncAdapter.java +++ /dev/null @@ -1,85 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.joda.time.format.ISODateTimeFormat; -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.List; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.Event; -import eu.pretix.libpretixsync.db.Migrations; -import eu.pretix.libpretixsync.utils.JSONUtils; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; - -public class EventSyncAdapter extends BaseSingleObjectSyncAdapter { - public EventSyncAdapter(BlockingEntityStore store, String eventSlug, String key, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, eventSlug, key, api, syncCycleId, feedback); - } - - @Override - public void updateObject(Event obj, JSONObject jsonobj) throws JSONException { - obj.setSlug(jsonobj.getString("slug")); - obj.setCurrency(jsonobj.getString("currency")); - obj.setDate_from(ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")).toDate()); - if (!jsonobj.isNull("date_to")) { - obj.setDate_to(ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate()); - } - obj.setLive(jsonobj.getBoolean("live")); - obj.setHas_subevents(jsonobj.getBoolean("has_subevents")); - obj.setJson_data(jsonobj.toString()); - } - - Event getKnownObject() { - List is = store.select(Event.class) - .where(Event.SLUG.eq(key)) - .get().toList(); - 
if (is.size() == 0) { - return null; - } else if (is.size() == 1) { - return is.get(0); - } else { - // What's going on here? Let's delete and re-fetch - store.delete(is); - return null; - } - } - - @Override - protected String getUrl() { - return api.organizerResourceUrl("events/" + key); - } - - @Override - String getResourceName() { - return "events"; - } - - @Override - Event newEmptyObject() { - return new Event(); - } - - public void standaloneRefreshFromJSON(JSONObject data) throws JSONException { - Event obj = store.select(Event.class) - .where(Event.SLUG.eq(data.getString("slug"))) - .get().firstOr(newEmptyObject()); - JSONObject old = null; - if (obj.getId() != null) { - old = obj.getJSON(); - } - // Store object - data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION); - data.put("__libpretixsync_syncCycleId", syncCycleId); - if (old == null) { - updateObject(obj, data); - store.insert(obj); - } else { - if (!JSONUtils.similar(data, old)) { - updateObject(obj, data); - store.update(obj); - } - } - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/EventSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/EventSyncAdapter.kt new file mode 100644 index 00000000..cc0d29f3 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/EventSyncAdapter.kt @@ -0,0 +1,113 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.db.Migrations +import eu.pretix.libpretixsync.sqldelight.Event +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import eu.pretix.libpretixsync.utils.JSONUtils +import org.joda.time.format.ISODateTimeFormat +import org.json.JSONException +import org.json.JSONObject + +class EventSyncAdapter( + db: SyncDatabase, + eventSlug: String, + key: String, + api: PretixApi, + syncCycleId: 
String, + feedback: ProgressFeedback? = null, +) : BaseSingleObjectSyncAdapter( + db = db, + eventSlug = eventSlug, + key = key, + api = api, + syncCycleId = syncCycleId, + feedback = feedback, +) { + + override fun getKnownObject(): Event? { + val known = db.eventQueries.selectBySlug(eventSlug).executeAsList() + + return if (known.isEmpty()) { + null + } else if (known.size == 1) { + known[0] + } else { + // What's going on here? Let's delete and re-fetch + db.eventQueries.deleteBySlug(eventSlug) + null + } + } + + override fun getResourceName(): String = "events" + + override fun getUrl(): String = api.organizerResourceUrl("events/$key") + + override fun getJSON(obj: Event): JSONObject = JSONObject(obj.json_data!!) + + override fun insert(jsonobj: JSONObject) { + val dateFrom = + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")) + .toDate() + + val dateTo = if (!jsonobj.isNull("date_to")) { + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate() + } else { + null + } + + db.eventQueries.insert( + currency = jsonobj.getString("currency"), + date_to = dateTo, + date_from = dateFrom, + has_subevents = jsonobj.getBoolean("has_subevents"), + json_data = jsonobj.toString(), + live = jsonobj.getBoolean("live"), + slug = jsonobj.getString("slug"), + ) + } + + override fun update(obj: Event, jsonobj: JSONObject) { + val dateFrom = + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")) + .toDate() + + val dateTo = if (!jsonobj.isNull("date_to")) { + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate() + } else { + null + } + + db.eventQueries.updateFromJson( + currency = jsonobj.getString("currency"), + date_to = dateTo, + date_from = dateFrom, + has_subevents = jsonobj.getBoolean("has_subevents"), + json_data = jsonobj.toString(), + live = jsonobj.getBoolean("live"), + slug = obj.slug, + ) + } + + override fun runInTransaction(body: 
TransactionWithoutReturn.() -> Unit) { + db.eventQueries.transaction(false, body) + } + + @Throws(JSONException::class) + fun standaloneRefreshFromJSON(data: JSONObject) { + // Store object + data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION) + data.put("__libpretixsync_syncCycleId", syncCycleId) + val known = getKnownObject() + if (known == null) { + insert(data) + } else { + val old = JSONObject(known.json_data!!) + if (!JSONUtils.similar(data, old)) { + update(known, data) + } + } + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/InvoiceSettingsSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/InvoiceSettingsSyncAdapter.java deleted file mode 100644 index 5f9879a2..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/InvoiceSettingsSyncAdapter.java +++ /dev/null @@ -1,23 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.List; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.Settings; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; - -public class InvoiceSettingsSyncAdapter extends SettingsSyncAdapter { - - public InvoiceSettingsSyncAdapter(BlockingEntityStore store, String eventSlug, String key, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, eventSlug, key, api, syncCycleId, feedback); - } - - @Override - protected String getUrl() { - return api.eventResourceUrl(eventSlug, "invoicesettings"); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/InvoiceSettingsSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/InvoiceSettingsSyncAdapter.kt new file mode 100644 index 00000000..bb729f2c --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/InvoiceSettingsSyncAdapter.kt @@ -0,0 +1,25 @@ +package eu.pretix.libpretixsync.sync + 
+import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback + +class InvoiceSettingsSyncAdapter( + db: SyncDatabase, + eventSlug: String, + key: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback? = null, +) : SettingsSyncAdapter( + db = db, + eventSlug = eventSlug, + key = key, + api = api, + syncCycleId = syncCycleId, + feedback = feedback, +) { + override fun getUrl(): String { + return api.eventResourceUrl(eventSlug, "invoicesettings") + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ItemCategorySyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ItemCategorySyncAdapter.java deleted file mode 100644 index 0582c5b0..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ItemCategorySyncAdapter.java +++ /dev/null @@ -1,65 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.Iterator; -import java.util.List; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.ItemCategory; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class ItemCategorySyncAdapter extends BaseConditionalSyncAdapter { - - public ItemCategorySyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - } - - @Override - public void updateObject(ItemCategory obj, JSONObject jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - obj.setServer_id(jsonobj.getLong("id")); - obj.setPosition(jsonobj.getLong("position")); - obj.setIs_addon(jsonobj.optBoolean("is_addon", false)); - obj.setJson_data(jsonobj.toString()); 
- } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(ItemCategory.class) - .where(ItemCategory.EVENT_SLUG.eq(eventSlug)) - .and(ItemCategory.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(ItemCategory.SERVER_ID) - .where(ItemCategory.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - String getResourceName() { - return "categories"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(ItemCategory obj) { - return obj.getServer_id(); - } - - @Override - ItemCategory newEmptyObject() { - return new ItemCategory(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ItemCategorySyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ItemCategorySyncAdapter.kt new file mode 100644 index 00000000..4bcae54a --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ItemCategorySyncAdapter.kt @@ -0,0 +1,84 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.sqldelight.ItemCategory +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.json.JSONObject + +class ItemCategorySyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, +) : BaseConditionalSyncAdapter( + db = db, + fileStorage = fileStorage, + eventSlug = eventSlug, + api = api, + syncCycleId = syncCycleId, + feedback = feedback, +) { + + override fun getResourceName(): String = "categories" + + override fun getId(obj: ItemCategory): Long = obj.server_id!! 
+ + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: ItemCategory): JSONObject = JSONObject(obj.json_data) + + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.itemCategoryQueries.selectServerIdsByEventSlug(eventSlug) + .execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + db.itemCategoryQueries.insert( + event_slug = eventSlug, + is_addon = jsonobj.optBoolean("is_addon", false), + json_data = jsonobj.toString(), + position = jsonobj.getLong("position"), + server_id = jsonobj.getLong("id"), + ) + } + + override fun update(obj: ItemCategory, jsonobj: JSONObject) { + db.itemCategoryQueries.updateFromJson( + event_slug = eventSlug, + is_addon = jsonobj.optBoolean("is_addon", false), + json_data = jsonobj.toString(), + position = jsonobj.getLong("position"), + id = obj.id, + ) + } + + override fun delete(key: Long) { + db.itemCategoryQueries.deleteByServerId(key) + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.itemCategoryQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.itemCategoryQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + ).executeAsList() +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ItemSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ItemSyncAdapter.java deleted file mode 100644 index 782dfbef..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ItemSyncAdapter.java +++ /dev/null @@ -1,137 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.io.IOException; -import java.io.InputStream; -import 
java.io.OutputStream; -import java.util.Iterator; -import java.util.List; - -import eu.pretix.libpretixsync.api.ApiException; -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.Item; -import eu.pretix.libpretixsync.db.Migrations; -import eu.pretix.libpretixsync.utils.HashUtils; -import eu.pretix.libpretixsync.utils.JSONUtils; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class ItemSyncAdapter extends BaseConditionalSyncAdapter { - - public ItemSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - } - - @Override - public void updateObject(Item obj, JSONObject jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - obj.setServer_id(jsonobj.getLong("id")); - obj.setPosition(jsonobj.getLong("position")); - obj.setCategory_id(jsonobj.optLong("category")); - obj.setAdmission(jsonobj.optBoolean("admission", false)); - obj.setActive(jsonobj.optBoolean("active", true)); - obj.setCheckin_text(jsonobj.optString("checkin_text")); - obj.setJson_data(jsonobj.toString()); - - String remote_filename = jsonobj.optString("picture"); - if (remote_filename != null && remote_filename.startsWith("http")) { - String hash = HashUtils.toSHA1(remote_filename.getBytes()); - String local_filename = "item_" + obj.getServer_id() + "_" + hash + remote_filename.substring(remote_filename.lastIndexOf(".")); - if (obj.getPicture_filename() != null && !obj.getPicture_filename().equals(local_filename)) { - fileStorage.delete(obj.getPicture_filename()); - obj.setPicture_filename(null); - } - if (!fileStorage.contains(local_filename)) { - try { - PretixApi.ApiResponse file = api.downloadFile(remote_filename); - OutputStream os = fileStorage.writeStream(local_filename); - InputStream 
is = file.getResponse().body().byteStream(); - byte[] buffer = new byte[1444]; - int byteread; - while ((byteread = is.read(buffer)) != -1) { - os.write(buffer, 0, byteread); - } - is.close(); - os.close(); - obj.setPicture_filename(local_filename); - } catch (ApiException e) { - // TODO: What to do? - e.printStackTrace(); - } catch (IOException e) { - // TODO: What to do? - e.printStackTrace(); - fileStorage.delete(local_filename); - } - } else { - obj.setPicture_filename(local_filename); - } - } else { - if (obj.getPicture_filename() != null) { - fileStorage.delete(obj.getPicture_filename()); - obj.setPicture_filename(null); - } - } - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(Item.class) - .where(Item.EVENT_SLUG.eq(eventSlug)) - .and(Item.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(Item.SERVER_ID) - .where(Item.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - String getResourceName() { - return "items"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(Item obj) { - return obj.getServer_id(); - } - - @Override - Item newEmptyObject() { - return new Item(); - } - - public void standaloneRefreshFromJSON(JSONObject data) throws JSONException { - Item obj = store.select(Item.class) - .where(Item.SERVER_ID.eq(data.getLong("id"))) - .get().firstOr(newEmptyObject()); - JSONObject old = null; - if (obj.getId() != null) { - old = obj.getJSON(); - } - - // Store object - data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION); - data.put("__libpretixsync_syncCycleId", syncCycleId); - if (old == null) { - updateObject(obj, data); - store.insert(obj); - } else { - if (!JSONUtils.similar(data, old)) { - updateObject(obj, data); - store.update(obj); - } - } - } -} diff --git 
package eu.pretix.libpretixsync.sync

import app.cash.sqldelight.TransactionWithoutReturn
import app.cash.sqldelight.db.QueryResult
import eu.pretix.libpretixsync.api.ApiException
import eu.pretix.libpretixsync.api.PretixApi
import eu.pretix.libpretixsync.db.Migrations
import eu.pretix.libpretixsync.sqldelight.Item
import eu.pretix.libpretixsync.sqldelight.SyncDatabase
import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback
import eu.pretix.libpretixsync.utils.HashUtils
import eu.pretix.libpretixsync.utils.JSONUtils
import org.json.JSONException
import org.json.JSONObject
import java.io.IOException

/**
 * Syncs the "items" (product) resource of one event into the local SQLDelight
 * database and mirrors each item's picture into [FileStorage].
 */
class ItemSyncAdapter(
    db: SyncDatabase,
    fileStorage: FileStorage,
    eventSlug: String,
    api: PretixApi,
    syncCycleId: String,
    feedback: ProgressFeedback?,
) : BaseConditionalSyncAdapter<Item, Long>(
    db = db,
    fileStorage = fileStorage,
    eventSlug = eventSlug,
    api = api,
    syncCycleId = syncCycleId,
    feedback = feedback,
) {

    override fun getResourceName(): String = "items"

    // Rows are only written by insert(), which always sets server_id,
    // so the assertion documents an invariant rather than hiding a null.
    override fun getId(obj: Item): Long = obj.server_id!!

    override fun getId(obj: JSONObject): Long = obj.getLong("id")

    override fun getJSON(obj: Item): JSONObject = JSONObject(obj.json_data)

    /** Returns the server IDs of all items already stored for this event. */
    override fun queryKnownIDs(): MutableSet<Long> {
        val res = mutableSetOf<Long>()
        db.itemQueries.selectServerIdsByEventSlug(eventSlug).execute { cursor ->
            while (cursor.next().value) {
                val id = cursor.getLong(0)
                    ?: throw RuntimeException("server_id column not available")

                res.add(id)
            }
            QueryResult.Unit
        }

        return res
    }

    override fun insert(jsonobj: JSONObject) {
        val pictureFilename = processPicture(jsonobj, null)

        db.itemQueries.insert(
            active = jsonobj.optBoolean("active", true),
            admission = jsonobj.optBoolean("admission", false),
            // NOTE(review): optLong() maps a missing/null "category" to 0, not NULL — confirm intended.
            category_id = jsonobj.optLong("category"),
            checkin_text = jsonobj.optString("checkin_text"),
            event_slug = eventSlug,
            json_data = jsonobj.toString(),
            picture_filename = pictureFilename,
            position = jsonobj.getLong("position"),
            server_id = jsonobj.getLong("id"),
            ticket_layout_id = null,
            ticket_layout_pretixpos_id = null,
        )
    }

    override fun update(obj: Item, jsonobj: JSONObject) {
        val pictureFilename = processPicture(jsonobj, obj.picture_filename)

        db.itemQueries.updateFromJson(
            active = jsonobj.optBoolean("active", true),
            admission = jsonobj.optBoolean("admission", false),
            category_id = jsonobj.optLong("category"),
            checkin_text = jsonobj.optString("checkin_text"),
            event_slug = eventSlug,
            json_data = jsonobj.toString(),
            picture_filename = pictureFilename,
            position = jsonobj.getLong("position"),
            id = obj.id,
        )
    }

    override fun delete(key: Long) {
        db.itemQueries.deleteByServerId(key)
    }

    /**
     * Ensures the item's picture is cached locally and returns the local file
     * name to store on the row, or null when the item has no downloadable
     * picture or the download failed. [oldFilename] is the previously stored
     * file; it is deleted when it no longer matches the current picture.
     */
    private fun processPicture(jsonobj: JSONObject, oldFilename: String?): String? {
        val remoteUrl = jsonobj.optString("picture")

        if (!remoteUrl.startsWith("http")) {
            // Picture removed (or not a downloadable URL): drop any stale local copy.
            if (oldFilename != null) {
                fileStorage.delete(oldFilename)
            }
            return null
        }

        val hash = HashUtils.toSHA1(remoteUrl.toByteArray())
        // Keep the remote file extension when present. Previously an
        // extension-less URL crashed with substring(-1).
        val dot = remoteUrl.lastIndexOf('.')
        val suffix = if (dot >= 0) remoteUrl.substring(dot) else ""
        val localFilename = "item_" + jsonobj.getLong("id") + "_" + hash + suffix

        if (oldFilename != null && oldFilename != localFilename) {
            fileStorage.delete(oldFilename)
        }
        if (fileStorage.contains(localFilename)) {
            return localFilename
        }

        return try {
            val file = api.downloadFile(remoteUrl)
            // use {} guarantees both streams are closed even if the copy
            // throws, which the previous manual close logic did not.
            fileStorage.writeStream(localFilename).use { outStream ->
                val inStream = file.response.body?.byteStream()
                    ?: throw IOException("Empty response body for $remoteUrl")
                inStream.use { it.copyTo(outStream) }
            }
            localFilename
        } catch (e: ApiException) {
            // TODO: What to do?
            e.printStackTrace()
            null
        } catch (e: IOException) {
            // TODO: What to do?
            e.printStackTrace()
            fileStorage.delete(localFilename)
            null
        }
    }

    override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) {
        db.itemQueries.transaction(false, body)
    }

    override fun runBatch(parameterBatch: List<Long>): List<Item> =
        db.itemQueries.selectByServerIdListAndEventSlug(
            server_id = parameterBatch,
            event_slug = eventSlug,
        ).executeAsList()

    /**
     * Inserts or updates a single item from an out-of-band JSON payload
     * (i.e. outside a regular paginated sync run).
     */
    @Throws(JSONException::class)
    fun standaloneRefreshFromJSON(data: JSONObject) {
        val obj = db.itemQueries.selectByServerId(data.getLong("id")).executeAsOneOrNull()
        val old: JSONObject? = obj?.json_data?.let { JSONObject(it) }

        // Tag the payload so later migrations know which schema/sync run wrote it.
        data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION)
        data.put("__libpretixsync_syncCycleId", syncCycleId)
        if (old == null) {
            insert(data)
        } else if (!JSONUtils.similar(data, old)) {
            update(obj, data)
        }
    }
}
package eu.pretix.libpretixsync.sync

import app.cash.sqldelight.TransactionWithoutReturn
import app.cash.sqldelight.db.QueryResult
import eu.pretix.libpretixsync.api.ApiException
import eu.pretix.libpretixsync.api.PretixApi
import eu.pretix.libpretixsync.api.ResourceNotModified
import eu.pretix.libpretixsync.sqldelight.MediumKeySet
import eu.pretix.libpretixsync.sqldelight.SyncDatabase
import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback
import org.json.JSONArray
import org.json.JSONException
import org.json.JSONObject
import java.util.concurrent.ExecutionException

/**
 * Mirrors key sets for reusable media into the local database. The payload is
 * handed in as a ready-made [JSONArray] instead of being fetched page by page,
 * so [downloadData] simply processes that array.
 */
class MediumKeySetSyncAdapter(
    db: SyncDatabase,
    fileStorage: FileStorage,
    api: PretixApi,
    syncCycleId: String,
    feedback: ProgressFeedback?,
    private var data: JSONArray,
) : BaseDownloadSyncAdapter<MediumKeySet, Long>(
    db = db,
    api = api,
    syncCycleId = syncCycleId,
    eventSlug = "__all__",
    fileStorage = fileStorage,
    feedback = feedback,
) {

    override fun getResourceName(): String = "mediumkeyset"

    // TODO: Seems unused?
    private fun rlmName(): String = "mediumkeyset"

    override fun getId(obj: MediumKeySet): Long = obj.public_id!!

    override fun getId(obj: JSONObject): Long = obj.getLong("public_id")

    override fun getJSON(obj: MediumKeySet): JSONObject = JSONObject(obj.json_data!!)

    /** Collects the public IDs of all locally known key sets. */
    override fun queryKnownIDs(): MutableSet<Long> {
        val knownIds = mutableSetOf<Long>()
        db.mediumKeySetQueries.selectPublicIds().execute { cursor ->
            while (cursor.next().value) {
                knownIds += cursor.getLong(0)
                    ?: throw RuntimeException("public_id column not available")
            }
            QueryResult.Unit
        }
        return knownIds
    }

    override fun insert(jsonobj: JSONObject) {
        with(jsonobj) {
            db.mediumKeySetQueries.insert(
                active = getBoolean("active"),
                diversification_key = getString("diversification_key"),
                json_data = toString(),
                media_type = getString("media_type"),
                organizer = getString("organizer"),
                public_id = getLong("public_id"),
                uid_key = getString("uid_key"),
            )
        }
    }

    override fun update(obj: MediumKeySet, jsonobj: JSONObject) {
        with(jsonobj) {
            db.mediumKeySetQueries.updateFromJson(
                active = getBoolean("active"),
                diversification_key = getString("diversification_key"),
                json_data = toString(),
                media_type = getString("media_type"),
                organizer = getString("organizer"),
                public_id = getLong("public_id"),
                uid_key = getString("uid_key"),
                id = obj.id,
            )
        }
    }

    override fun delete(key: Long) {
        db.mediumKeySetQueries.deleteByPublicId(key)
    }

    // Key sets missing from the payload are purged after the run.
    override fun deleteUnseen(): Boolean = true

    override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) {
        db.mediumKeySetQueries.transaction(false, body)
    }

    override fun runBatch(parameterBatch: List<Long>): List<MediumKeySet> =
        db.mediumKeySetQueries.selectByPublicIdList(parameterBatch).executeAsList()

    @Throws(
        JSONException::class,
        ApiException::class,
        ResourceNotModified::class,
        ExecutionException::class,
        InterruptedException::class,
    )
    override fun downloadData() {
        asyncProcessPage(data).get()
    }
}
{ private var subeventsDeletionDate: MutableMap = HashMap() private fun deletionTimeForSubevent(sid: Long, eventSlug: String): Long? { if (subeventsDeletionDate.containsKey(sid)) { return subeventsDeletionDate[sid] } try { - SubEventSyncAdapter(store, eventSlug, sid.toString(), api, syncCycleId) { }.download() + SubEventSyncAdapter(db, eventSlug, sid.toString(), api, syncCycleId) { }.download() } catch (e: RollbackException) { subeventsDeletionDate[sid] = null return null @@ -31,13 +28,13 @@ class OrderCleanup(val store: BlockingEntityStore, val fileStorage: subeventsDeletionDate[sid] = null return null } - val se = store.select(SubEvent::class.java).where(SubEvent.SERVER_ID.eq(sid)).get().firstOrNull() + val se = db.subEventQueries.selectByServerId(sid).executeAsOneOrNull()?.toModel() if (se == null) { subeventsDeletionDate[sid] = null return null } - val d = DateTime(if (se.getDate_to() != null) se.getDate_to() else se.getDate_from()) - val v = d.plus(Duration.standardDays(14)).millis + val d = se.dateTo ?: se.dateFrom + val v = d.plus(Duration.ofDays(14)).toInstant().toEpochMilli() subeventsDeletionDate[sid] = v return v } @@ -60,28 +57,24 @@ class OrderCleanup(val store: BlockingEntityStore, val fileStorage: // Further above, in updateObject(), we *always* reset the deletion date to 0 for anything // that's in the diff. This way, we can be sure to "un-delete" orders when they are changed // -- or when the subevent date is changed, which triggers all orders to be in the diff. 
- val ordercount: Int = store.count(Order::class.java) - .where(Order.EVENT_SLUG.eq(eventSlug)) - .and(Order.DELETE_AFTER_TIMESTAMP.isNull().or(Order.DELETE_AFTER_TIMESTAMP.lt(1L))) - .get().value() + val ordercount = db.orderCleanupQueries.countOrders( + event_slug = eventSlug, + ).executeAsOne() var done = 0 feedback?.postFeedback("Checking for old orders ($done/$ordercount) [$eventSlug] …") while (true) { - val orders: List = store.select(Order::class.java) - .where(Order.EVENT_SLUG.eq(eventSlug)) - .and(Order.DELETE_AFTER_TIMESTAMP.isNull().or(Order.DELETE_AFTER_TIMESTAMP.lt(1L))) - .limit(100) - .get().toList() - if (orders.isEmpty()) { + val orderIds = db.orderCleanupQueries.selectOrderIds(event_slug = eventSlug).executeAsList() + if (orderIds.isEmpty()) { break } - for (o in orders) { + for (orderId in orderIds) { var deltime: Long? = null try { - val subeventIds = store.select(OrderPosition.SUBEVENT_ID) - .from(OrderPosition::class.java) - .where(OrderPosition.ORDER_ID.eq(o.id)) - .get().toList().map { it.get(0) as Long? 
}.distinct() + val subeventIds = db.orderCleanupQueries.selectSubEventIdsForOrder(orderId) + .executeAsList() + .map { it.subevent_id } + .distinct() + if (subeventIds.isEmpty()) { deltime = System.currentTimeMillis() } @@ -105,8 +98,10 @@ class OrderCleanup(val store: BlockingEntityStore, val fileStorage: if (deltime == null) { continue } - o.setDeleteAfterTimestamp(deltime) - store.update(o) + db.orderCleanupQueries.updateDeleteAfterTimestamp( + delete_after_timestamp = deltime, + id = orderId, + ) done++ if (done % 50 == 0) { feedback?.postFeedback("Checking for old orders ($done/$ordercount) …") @@ -114,18 +109,33 @@ class OrderCleanup(val store: BlockingEntityStore, val fileStorage: } } feedback?.postFeedback("Deleting old orders…") - var deleted = 0 + var deleted = 0L while (true) { - val ordersToDelete: List = store.select(Order.ID).where(Order.DELETE_AFTER_TIMESTAMP.lt(System.currentTimeMillis()).and(Order.DELETE_AFTER_TIMESTAMP.gt(1L))).and(Order.ID.notIn(store.select(OrderPosition.ORDER_ID).from(OrderPosition::class.java).where(OrderPosition.SUBEVENT_ID.eq(subeventId)))).limit(200).get().toList() + // TODO: Why NOT IN? 
+ val ordersToDelete = db.orderCleanupQueries.selectOrderIdsToDelete( + current_timestamp = System.currentTimeMillis(), + sub_event_id = subeventId, + ).executeAsList() + if (ordersToDelete.isEmpty()) { break } val idsToDelete: MutableList = ArrayList() - for (t in ordersToDelete) { - idsToDelete.add(t.get(0)) + for (id in ordersToDelete) { + idsToDelete.add(id) + } + + // Count affected rows manually, since there is no convenient way + // to do this with one query that works on SQLite and Postgres + val count = db.orderCleanupQueries.transactionWithResult { + val count = db.orderCleanupQueries.countOrdersByIdList(idsToDelete).executeAsOne() + // sqlite foreign keys are created with `on delete cascade`, + // so order positions and checkins are handled automatically + db.orderCleanupQueries.deleteOrders(idsToDelete) + count } - // sqlite foreign keys are created with `on delete cascade`, so order positions and checkins are handled automatically - deleted += store.delete(Order::class.java).where(Order.ID.`in`(idsToDelete)).get().value() + + deleted += count feedback?.postFeedback("Deleting old orders ($deleted)…") } } @@ -135,10 +145,10 @@ class OrderCleanup(val store: BlockingEntityStore, val fileStorage: if (eventsDeletionDate.containsKey(slug)) { return eventsDeletionDate[slug] } - val e: Event = store.select(Event::class.java).where(Event.SLUG.eq(slug)).get().firstOrNull() - ?: return null - val d = DateTime(if (e.getDate_to() != null) e.getDate_to() else e.getDate_from()) - val v = d.plus(Duration.standardDays(14)).millis + val e = db.eventQueries.selectBySlug(slug).executeAsOneOrNull()?.toModel() ?: return null + val d = e.dateTo ?: e.dateFrom + val v = d.plus(Duration.ofDays(14)).toInstant().toEpochMilli() + eventsDeletionDate[slug] = v return v } @@ -147,29 +157,35 @@ class OrderCleanup(val store: BlockingEntityStore, val fileStorage: if (keepSlugs.isEmpty()) return feedback?.postFeedback("Deleting orders of old events…") - val tuples: List = 
store.select(Order.EVENT_SLUG) - .from(Order::class.java) - .where(Order.EVENT_SLUG.notIn(keepSlugs)) - .groupBy(Order.EVENT_SLUG) - .orderBy(Order.EVENT_SLUG) - .get().toList() - var deleted = 0 - for (t in tuples) { - val slug = t.get(0) + val slugs = db.orderCleanupQueries.selectOldEventSlugs(keepSlugs) + .executeAsList() + .map { it.event_slug!! } + + var deleted = 0L + for (slug in slugs) { val deletionDate = deletionTimeForEvent(slug) if (deletionDate == null || deletionDate < System.currentTimeMillis()) { - store.delete(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.like("order%")).and(ResourceSyncStatus.EVENT_SLUG.eq(slug)) + db.resourceSyncStatusQueries.deleteByResourceFilterAndEventSlug( + filter = "order%", + event_slug = slug, + ) while (true) { - val ordersToDelete: List = store.select(Order.ID).where(Order.EVENT_SLUG.eq(slug)).limit(200).get().toList() - if (ordersToDelete.isEmpty()) { + val idsToDelete = db.orderCleanupQueries.selectOrderIdsForOldEvent(slug) + .executeAsList() + if (idsToDelete.isEmpty()) { break } - val idsToDelete: MutableList = ArrayList() - for (t2 in ordersToDelete) { - idsToDelete.add(t2.get(0)) + + // Count affected rows manually, since there is no convenient way + // to do this with one query that works on SQLite and Postgres + val count = db.orderCleanupQueries.transactionWithResult { + val count = db.orderCleanupQueries.countOrdersByIdList(idsToDelete).executeAsOne() + // sqlite foreign keys are created with `on delete cascade`, + // so order positions and checkins are handled automatically + db.orderCleanupQueries.deleteOrders(idsToDelete) + count } - // sqlite foreign keys are created with `on delete cascade`, so order positions and checkins are handled automatically - deleted += store.delete(Order::class.java).where(Order.ID.`in`(idsToDelete)).get().value() + deleted += count feedback?.postFeedback("Deleting orders of old events ($deleted)…") } } @@ -177,15 +193,13 @@ class OrderCleanup(val store: 
BlockingEntityStore, val fileStorage: } fun deleteOldPdfImages() { - store.delete(CachedPdfImage::class.java).where( - CachedPdfImage.ORDERPOSITION_ID.notIn(store.select(OrderPosition.SERVER_ID).from(OrderPosition::class.java)) - ) + db.cachedPdfImageQueries.deleteOld() for (filename in fileStorage.listFiles { _, s -> s.startsWith("pdfimage_") }) { val namebase = filename.split("\\.".toRegex()).toTypedArray()[0] val etag = namebase.split("_".toRegex()).toTypedArray()[1] - if (store.count(CachedPdfImage::class.java).where(CachedPdfImage.ETAG.eq(etag)).get().value() == 0) { + if (db.cachedPdfImageQueries.countEtag(etag).executeAsOne() == 0L) { fileStorage.delete(filename) } } } -} \ No newline at end of file +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/OrderSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/OrderSyncAdapter.java deleted file mode 100644 index 646ce995..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/OrderSyncAdapter.java +++ /dev/null @@ -1,574 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.Duration; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - -import java.io.File; -import java.io.FilenameFilter; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ExecutionException; - -import eu.pretix.libpretixsync.api.ApiException; -import 
eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.api.ResourceNotModified; -import eu.pretix.libpretixsync.db.CachedPdfImage; -import eu.pretix.libpretixsync.db.CheckIn; -import eu.pretix.libpretixsync.db.CheckInList; -import eu.pretix.libpretixsync.db.Event; -import eu.pretix.libpretixsync.db.Item; -import eu.pretix.libpretixsync.db.Migrations; -import eu.pretix.libpretixsync.db.Order; -import eu.pretix.libpretixsync.db.OrderPosition; -import eu.pretix.libpretixsync.db.ResourceSyncStatus; -import eu.pretix.libpretixsync.db.SubEvent; -import eu.pretix.libpretixsync.utils.HashUtils; -import eu.pretix.libpretixsync.utils.JSONUtils; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.RollbackException; -import io.requery.query.Scalar; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; -import kotlin.reflect.jvm.internal.impl.util.Check; - -public class OrderSyncAdapter extends BaseDownloadSyncAdapter { - public OrderSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, Long subeventId, boolean withPdfData, boolean is_pretixpos, PretixApi api, String syncCylceId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCylceId, feedback); - this.withPdfData = withPdfData; - this.subeventId = subeventId; - this.is_pretixpos = is_pretixpos; - } - - private Map itemCache = new HashMap<>(); - private Map listCache = new HashMap<>(); - private Map> checkinCache = new HashMap<>(); - private List checkinCreateCache = new ArrayList<>(); - private String firstResponseTimestamp; - private String lastOrderTimestamp; - private ResourceSyncStatus rlm; - private boolean withPdfData; - private boolean is_pretixpos; - private Long subeventId; - - private String rlmName() { - if (withPdfData) { - return "orders_withpdfdata"; - } else { - return "orders"; - } - } - - @Override - public void download() throws JSONException, ApiException, 
ExecutionException, InterruptedException { - boolean completed = false; - try { - super.download(); - completed = true; - } finally { - ResourceSyncStatus resourceSyncStatus = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(rlmName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq(eventSlug)) - .limit(1) - .get().firstOrNull(); - - // We need to cache the response timestamp of the *first* page in the result set to make - // sure we don't miss anything between this and the next run. - // - // If the download failed, completed will be false. In case this was a full fetch - // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be - // able to continue properly. - if (firstResponseTimestamp != null) { - if (resourceSyncStatus == null) { - resourceSyncStatus = new ResourceSyncStatus(); - resourceSyncStatus.setResource(rlmName()); - resourceSyncStatus.setEvent_slug(eventSlug); - if (completed) { - resourceSyncStatus.setStatus("complete"); - } else { - resourceSyncStatus.setStatus("incomplete:" + lastOrderTimestamp); - } - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } else { - if (completed) { - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } - } - } else if (completed && resourceSyncStatus != null) { - resourceSyncStatus.setStatus("complete"); - store.update(resourceSyncStatus); - } else if (!completed && lastOrderTimestamp != null && resourceSyncStatus != null) { - resourceSyncStatus.setStatus("incomplete:" + lastOrderTimestamp); - store.update(resourceSyncStatus); - } - lastOrderTimestamp = null; - firstResponseTimestamp = null; - } - } - - private Item getItem(long id) { - if (itemCache.size() == 0) { - List items = store - .select(Item.class) - .get().toList(); - for (Item item : items) { - itemCache.put(item.getServer_id(), item); - } - } - return itemCache.get(id); - } - - private void 
updatePositionObject(OrderPosition obj, JSONObject jsonobj, JSONObject jsonorder, JSONObject parent) throws JSONException { - obj.setServer_id(jsonobj.getLong("id")); - obj.setPositionid(jsonobj.getLong("positionid")); - obj.setAttendee_name(jsonobj.isNull("attendee_name") ? "" : jsonobj.optString("attendee_name")); - obj.setAttendee_email(jsonobj.isNull("attendee_email") ? "" : jsonobj.optString("attendee_email")); - obj.setSecret(jsonobj.optString("secret")); - obj.setJson_data(jsonobj.toString()); - obj.setItem(getItem(jsonobj.getLong("item"))); - obj.setSubevent_id(jsonobj.optLong("subevent")); - obj.setVariation_id(jsonobj.optLong("variation")); - - if (obj.getAttendee_name() == null && parent != null && !parent.isNull("attendee_name")) { - obj.setAttendee_name(parent.getString("attendee_name")); - } - if (obj.getAttendee_email() == null && parent != null && !parent.isNull("attendee_email")) { - obj.setAttendee_email(parent.getString("attendee_email")); - } - - if (obj.getAttendee_name() == null) { - try { - JSONObject jInvoiceAddress = jsonorder.getJSONObject("invoice_address"); - if (jInvoiceAddress.isNull("name")) { - obj.setAttendee_name(jInvoiceAddress.getString("name")); - } - } catch (JSONException e) { - e.printStackTrace(); - } - } - - if (obj.getId() == null) { - store.insert(obj); - } - - Map known = new HashMap<>(); - List checkincache = checkinCache.get(obj.getId()); - if (checkincache != null) { - for (CheckIn op : checkincache) { - if (op.getServer_id() != null && op.getServer_id() > 0) { - known.put(op.getServer_id(), op); - } else { - store.delete(op); - } - } - } - JSONArray checkins = jsonobj.getJSONArray("checkins"); - for (int i = 0; i < checkins.length(); i++) { - JSONObject ci = checkins.getJSONObject(i); - Long listid = ci.getLong("list"); - if (known.containsKey(listid)) { - CheckIn ciobj = known.remove(listid); - ciobj.setPosition(obj); - ciobj.setType(ci.optString("type", "entry")); - ciobj.setListId(listid); - 
ciobj.setDatetime(ISODateTimeFormat.dateTimeParser().parseDateTime(ci.getString("datetime")).toDate()); - ciobj.setJson_data(ci.toString()); - store.update(ciobj); - } else { - CheckIn ciobj = new CheckIn(); - ciobj.setPosition(obj); - ciobj.setType(ci.optString("type", "entry")); - ciobj.setListId(listid); - ciobj.setDatetime(ISODateTimeFormat.dateTimeParser().parseDateTime(ci.getString("datetime")).toDate()); - ciobj.setJson_data(ci.toString()); - ciobj.setServer_id(ci.optLong("id")); - checkinCreateCache.add(ciobj); - } - } - if (known.size() > 0) { - store.delete(known.values()); - } - - - // Images - if (jsonobj.has("pdf_data")) { - JSONObject pdfdata = jsonobj.getJSONObject("pdf_data"); - if (pdfdata.has("images")) { - JSONObject images = pdfdata.getJSONObject("images"); - updatePdfImages(store, fileStorage, api, obj.getServer_id(), images); - } - } - } - - public static void updatePdfImages(BlockingEntityStore store, FileStorage fileStorage, PretixApi api, Long serverId, JSONObject images) { - Set seen_etags = new HashSet<>(); - for (Iterator it = images.keys(); it.hasNext(); ) { - String k = (String) it.next(); - String remote_filename = images.optString(k); - if (remote_filename == null || !remote_filename.startsWith("http")) { - continue; - } - String etag = HashUtils.toSHA1(remote_filename.getBytes()); - if (remote_filename.contains("#etag=")) { - etag = remote_filename.split("#etag=")[1]; - } - String local_filename = "pdfimage_" + etag + ".bin"; - seen_etags.add(etag); - - if (!fileStorage.contains(local_filename)) { - try { - PretixApi.ApiResponse file = api.downloadFile(remote_filename); - OutputStream os = fileStorage.writeStream(local_filename); - InputStream is = file.getResponse().body().byteStream(); - byte[] buffer = new byte[1444]; - int byteread; - while ((byteread = is.read(buffer)) != -1) { - os.write(buffer, 0, byteread); - } - is.close(); - os.close(); - } catch (ApiException e) { - // TODO: What to do? 
- e.printStackTrace(); - } catch (IOException e) { - // TODO: What to do? - e.printStackTrace(); - fileStorage.delete(local_filename); - } - } - CachedPdfImage cpi = store.select(CachedPdfImage.class).where(CachedPdfImage.ORDERPOSITION_ID.eq(serverId)).and(CachedPdfImage.KEY.eq(k)).get().firstOrNull(); - if (cpi == null) { - cpi = new CachedPdfImage(); - cpi.setEtag(etag); - cpi.setKey(k); - cpi.setOrderposition_id(serverId); - store.insert(cpi); - } else { - cpi.setEtag(etag); - store.update(cpi); - } - } - - store.delete(CachedPdfImage.class).where( - CachedPdfImage.ORDERPOSITION_ID.eq(serverId).and( - CachedPdfImage.ETAG.notIn(seen_etags) - ) - ); - } - - @Override - protected void afterPage() { - super.afterPage(); - store.insert(checkinCreateCache); - checkinCreateCache.clear(); - } - - @Override - public void updateObject(Order obj, JSONObject jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - obj.setCode(jsonobj.getString("code")); - obj.setStatus(jsonobj.getString("status")); - obj.setEmail(jsonobj.optString("email")); - obj.setCheckin_attention(jsonobj.optBoolean("checkin_attention")); - obj.setCheckin_text(jsonobj.optString("checkin_text")); - obj.setValid_if_pending(jsonobj.optBoolean("valid_if_pending", false)); - JSONObject json_data = new JSONObject(jsonobj.toString()); - json_data.remove("positions"); - obj.setJson_data(json_data.toString()); - obj.setDeleteAfterTimestamp(0L); - - if (obj.getId() == null) { - store.insert(obj); - } - - Map known = new HashMap<>(); - List allPos = store.select(OrderPosition.class) - .leftJoin(Order.class).on(Order.ID.eq(OrderPosition.ORDER_ID)) - .where(OrderPosition.ORDER_ID.eq(obj.getId())).get().toList(); - for (OrderPosition op : allPos) { - known.put(op.getServer_id(), op); - } - - JSONArray posarray = jsonobj.getJSONArray("positions"); - Map posmap = new HashMap<>(); - for (int i = 0; i < posarray.length(); i++) { - JSONObject posjson = posarray.getJSONObject(i); - 
posmap.put(posjson.getLong("id"), posjson); - } - for (int i = 0; i < posarray.length(); i++) { - JSONObject posjson = posarray.getJSONObject(i); - posjson.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION); - posjson.put("__libpretixsync_syncCycleId", syncCycleId); - Long jsonid = posjson.getLong("id"); - JSONObject old = null; - OrderPosition posobj; - if (known.containsKey(jsonid)) { - posobj = known.get(jsonid); - old = posobj.getJSON(); - } else { - posobj = new OrderPosition(); - posobj.setOrder(obj); - } - JSONObject parent = null; - if (!posjson.isNull("addon_to")) { - parent = posmap.get(posjson.getLong("addon_to")); - } - if (known.containsKey(jsonid)) { - known.remove(jsonid); - if (!JSONUtils.similar(posjson, old)) { - updatePositionObject(posobj, posjson, jsonobj, parent); - store.update(posobj); - } - } else { - updatePositionObject(posobj, posjson, jsonobj, parent); - } - } - if (known.size() > 0) { - store.delete(known.values()); - } - } - - @Override - protected boolean deleteUnseen() { - return false; - } - - @Override - protected JSONObject downloadPage(String url, boolean isFirstPage) throws ApiException, ResourceNotModified { - if (isFirstPage) { - rlm = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(rlmName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq(eventSlug)) - .limit(1) - .get().firstOrNull(); - } - boolean is_continued_fetch = false; - if (!url.contains("testmode=")) { - if (url.contains("?")) { - url += "&"; - } else { - url += "?"; - } - url += "testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads"; - if (!is_pretixpos) { - url += "&exclude=payments&exclude=refunds"; - } - if (withPdfData) { - url += "&pdf_data=true"; - } - } - - DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ"); - DateTime cutoff = new DateTime().withZone(DateTimeZone.UTC).minus(Duration.standardDays(14)); - String firstrun_params 
= ""; - try { - if (subeventId != null && subeventId > 0) { - firstrun_params = "&subevent_after=" + URLEncoder.encode(formatter.print(cutoff), "UTF-8"); - } - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - // On event series, we ignore orders that only affect subevents more than 14 days old. - // However, we can only do that on the first run, since we'd otherwise miss if e.g. an order - // that we have in our current database is changed to a date outside that time frame. - - if (rlm != null) { - // This resource has been fetched before. - if (rlm.getStatus() != null && rlm.getStatus().startsWith("incomplete:")) { - // Continuing an interrupted fetch - - // Ordering is crucial here: Only because the server returns the orders in the - // order of creation we can be sure that we don't miss orders created in between our - // paginated requests. - is_continued_fetch = true; - try { - if (!url.contains("created_since")) { - url += "&ordering=datetime&created_since=" + URLEncoder.encode(rlm.getStatus().substring(11), "UTF-8") + firstrun_params; - } - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - } else { - // Diff to last time - - // Ordering is crucial here: Only because the server returns the orders in the - // order of modification we can be sure that we don't miss orders created in between our - // paginated requests. If an order were to be modified between our fetch of page 1 - // and 2 that originally wasn't part of the result set, we won't see it (as it will - // be inserted on page 1), but we'll see it the next time, and we will se some - // duplicates on page 2, but we don't care. The important part is that nothing gets - // lost "between the pages". If an order of page 2 gets modified and moves to page - // one while we fetch page 2, again, we won't see it and we'll see some duplicates, - // but the next sync will fix it since we always fetch our diff compared to the time - // of the first page. 
- try { - if (!url.contains("modified_since")) { - url += "&ordering=-last_modified&modified_since=" + URLEncoder.encode(rlm.getLast_modified(), "UTF-8"); - } - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - } - } else { - if (!url.contains("subevent_after")) { - url += firstrun_params; - } - } - - PretixApi.ApiResponse apiResponse = api.fetchResource(url); - if (isFirstPage && !is_continued_fetch) { - firstResponseTimestamp = apiResponse.getResponse().header("X-Page-Generated"); - } - JSONObject d = apiResponse.getData(); - if (apiResponse.getResponse().code() == 200) { - try { - JSONArray res = d.getJSONArray("results"); - if (res.length() > 0) { - lastOrderTimestamp = res.getJSONObject(res.length() - 1).getString("datetime"); - } - } catch (JSONException e) { - e.printStackTrace(); - } - } - return d; - } - - @Override - protected Map getKnownObjects(Set ids) { - checkinCache.clear(); - - if (ids.isEmpty()) { - return new HashMap<>(); - } - - List allCheckins = store.select(CheckIn.class) - .leftJoin(OrderPosition.class).on(OrderPosition.ID.eq(CheckIn.POSITION_ID)) - .leftJoin(Order.class).on(Order.ID.eq(OrderPosition.ORDER_ID)) - // Doing this WHERE IN even though we have a JOIN is entirely redundant. 
- // But we know that ``ids`` is of small size and this will trick SQLite into a - // more efficient query plan that avoids a full table scan :) - .where(CheckIn.POSITION_ID.in( - store.select(OrderPosition.ID) - .where(OrderPosition.ORDER_ID.in( - store.select(Order.ID) - .where(Order.CODE.in(ids)) - )) - )) - .get().toList(); - for (CheckIn c : allCheckins) { - Long pk = c.getPosition().getId(); - if (checkinCache.containsKey(pk)) { - checkinCache.get(pk).add(c); - } else { - List l = new ArrayList<>(); - l.add(c); - checkinCache.put(pk, l); - } - } - - return super.getKnownObjects(ids); - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(Order.class) - // pretix guarantees uniqueness of CODE within an organizer account, so we don't need - // to filter by EVENT_SLUG. This is good, because SQLite tends to build a very stupid - // query plan otherwise if statistics are not up to date (using the EVENT_SLUG index - // instead of using the CODE index) - .where(Order.CODE.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return null; - } - - @Override - protected int getKnownCount() { - return store.count(Order.class) - .where(Order.EVENT_SLUG.eq(eventSlug)) - .get().value(); - } - - @Override - protected boolean autoPersist() { - return false; - } - - @Override - String getResourceName() { - return "orders"; - } - - @Override - String getId(JSONObject obj) throws JSONException { - return obj.getString("code"); - } - - @Override - String getId(Order obj) { - return obj.getCode(); - } - - @Override - Order newEmptyObject() { - return new Order(); - } - - public void standaloneRefreshFromJSON(JSONObject data) throws JSONException { - Order order = store.select(Order.class) - .where(Order.CODE.eq(data.getString("code"))) - .get().firstOr(newEmptyObject()); - JSONObject old = null; - if (order.getId() != null) { - old = order.getJSON(); - if (!old.has("positions")) { - JSONArray pos = new 
JSONArray(); - for (OrderPosition p : order.getPositions()) { - pos.put(p.getJSON()); - } - old.put("positions", pos); - } - } - - // Warm up cache - Set ids = new HashSet<>(); - ids.add(data.getString("code")); - getKnownObjects(ids); - // Store object - data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION); - data.put("__libpretixsync_syncCycleId", syncCycleId); - if (old == null) { - updateObject(order, data); - } else { - if (!JSONUtils.similar(data, old)) { - updateObject(order, data); - store.update(order); - } - } - store.insert(checkinCreateCache); - checkinCreateCache.clear(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/OrderSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/OrderSyncAdapter.kt new file mode 100644 index 00000000..f57c1b4f --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/OrderSyncAdapter.kt @@ -0,0 +1,641 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import eu.pretix.libpretixsync.api.ApiException +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.db.Migrations +import eu.pretix.libpretixsync.sqldelight.CheckIn +import eu.pretix.libpretixsync.sqldelight.Item +import eu.pretix.libpretixsync.sqldelight.OrderPosition +import eu.pretix.libpretixsync.sqldelight.ResourceSyncStatus +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import eu.pretix.libpretixsync.utils.HashUtils +import eu.pretix.libpretixsync.utils.JSONUtils +import org.joda.time.DateTime +import org.joda.time.DateTimeZone +import org.joda.time.Duration +import org.joda.time.format.DateTimeFormat +import org.joda.time.format.ISODateTimeFormat +import org.json.JSONArray +import org.json.JSONException +import org.json.JSONObject +import java.io.IOException +import java.io.UnsupportedEncodingException +import java.net.URLEncoder +import 
eu.pretix.libpretixsync.sqldelight.Orders as Order + +class OrderSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + private val subEventId: Long?, + private val withPdfData: Boolean, + private val isPretixpos: Boolean, + api: PretixApi, + syncCylceId: String, + feedback: ProgressFeedback?, +) : BaseDownloadSyncAdapter(db, api, syncCylceId, eventSlug, fileStorage, feedback) { + + private val itemCache: MutableMap = HashMap() + private val checkinCache: MutableMap> = HashMap() + private val checkinCreateCache: MutableList = ArrayList() + + private var firstResponseTimestamp: String? = null + private var lastOrderTimestamp: String? = null + private var rlm: ResourceSyncStatus? = null + + private fun rlmName(): String { + return if (withPdfData) { + "orders_withpdfdata" + } else { + "orders" + } + } + + override fun download() { + var completed = false + try { + super.download() + completed = true + } finally { + val resourceSyncStatus = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = rlmName(), + event_slug = eventSlug, + ).executeAsOneOrNull() + + // We need to cache the response timestamp of the *first* page in the result set to make + // sure we don't miss anything between this and the next run. + // + // If the download failed, completed will be false. In case this was a full fetch + // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be + // able to continue properly. 
+ if (firstResponseTimestamp != null) { + if (resourceSyncStatus == null) { + val status = if (completed) { + "complete" + } else { + "incomplete:$lastOrderTimestamp" + } + + db.resourceSyncStatusQueries.insert( + event_slug = eventSlug, + last_modified = firstResponseTimestamp, + meta = null, + resource = rlmName(), + status = status, + ) + } else { + if (completed) { + db.resourceSyncStatusQueries.updateLastModified( + last_modified = firstResponseTimestamp, + id = resourceSyncStatus.id, + ) + } + } + } else if (completed && resourceSyncStatus != null) { + db.resourceSyncStatusQueries.updateStatus( + status = "complete", + id = resourceSyncStatus.id, + ) + } else if (!completed && lastOrderTimestamp != null && resourceSyncStatus != null) { + db.resourceSyncStatusQueries.updateStatus( + status = "incomplete:$lastOrderTimestamp", + id = resourceSyncStatus.id, + ) + } + lastOrderTimestamp = null + firstResponseTimestamp = null + } + } + + private fun preparePositionObject(jsonobj: JSONObject, orderId: Long, jsonorder: JSONObject, parent: JSONObject?): OrderPosition { + val jsonName = if (jsonobj.isNull("attendee_name")) "" else jsonobj.optString("attendee_name") + // TODO: BUG: jsonName can never be null, so parent / jInvoiceAddress is never used + // Keeping old behaviour for compatibility + var attendeeName = if (jsonName == null && parent != null && !parent.isNull("attendee_name")) { + parent.getString("attendee_name") + } else { + jsonName + } + if (attendeeName == null) { + try { + val jInvoiceAddress = jsonorder.getJSONObject("invoice_address") + if (jInvoiceAddress.isNull("name")) { + attendeeName = jInvoiceAddress.getString("name") + } + } catch (e: JSONException) { + e.printStackTrace() + } + } + + val jsonEmail = if (jsonobj.isNull("attendee_email")) "" else jsonobj.optString("attendee_email") + // TODO: BUG: jsonEmail can never be null, so parent is never used + // Keeping old behaviour for compatibility + val attendeeEmail = if (jsonEmail == null && 
parent != null && !parent.isNull("attendee_email")) { + parent.getString("attendee_email") + } else { + jsonEmail + } + + return OrderPosition( + id = -1, + attendee_email = attendeeEmail, + attendee_name = attendeeName, + item = getItem(jsonobj.getLong("item"))?.id, + json_data = jsonobj.toString(), + order_ref = orderId, + positionid = jsonobj.getLong("positionid"), + secret = jsonobj.optString("secret"), + server_id = jsonobj.getLong("id"), + subevent_id = jsonobj.optLong("subevent"), + variation_id = jsonobj.optLong("variation"), + ) + } + + private fun insertPositionObject(jsonobj: JSONObject, orderId: Long, jsonorder: JSONObject, parent: JSONObject?) { + val posobj = preparePositionObject(jsonobj, orderId, jsonorder, parent) + + val id = db.orderPositionQueries.transactionWithResult { + db.orderPositionQueries.insert( + attendee_email = posobj.attendee_email, + attendee_name = posobj.attendee_name, + item = posobj.item, + json_data = posobj.json_data, + order_ref = posobj.order_ref, + positionid = posobj.positionid, + secret = posobj.secret, + server_id = posobj.server_id, + subevent_id = posobj.subevent_id, + variation_id = posobj.variation_id, + ) + + db.compatQueries.getLastInsertedOrderPositionId().executeAsOne() + } + + afterInsertOrUpdatePositionObject(id, posobj.server_id, jsonobj) + } + + private fun updatePositionObject(obj: OrderPosition, jsonobj: JSONObject, orderId: Long, jsonorder: JSONObject, parent: JSONObject?) 
{ + val posobj = preparePositionObject(jsonobj, orderId, jsonorder, parent) + + db.orderPositionQueries.updateFromJson( + attendee_email = posobj.attendee_email, + attendee_name = posobj.attendee_name, + item = posobj.item, + json_data = posobj.json_data, + order_ref = posobj.order_ref, + positionid = posobj.positionid, + secret = posobj.secret, + server_id = posobj.server_id, + subevent_id = posobj.subevent_id, + variation_id = posobj.variation_id, + id = obj.id, + ) + + afterInsertOrUpdatePositionObject(obj.id, obj.server_id, jsonobj) + } + + private fun afterInsertOrUpdatePositionObject(positionId: Long, positionServerId: Long?, jsonobj: JSONObject) { + val known: MutableMap = mutableMapOf() + val checkincache: List? = checkinCache[positionId] + if (checkincache != null) { + for (op in checkincache) { + if (op.server_id != null && op.server_id > 0) { + known[op.server_id] = op + } else { + db.checkInQueries.deleteById(op.id) + } + } + } + val checkins = jsonobj.getJSONArray("checkins") + for (i in 0 until checkins.length()) { + val ci = checkins.getJSONObject(i) + val listid = ci.getLong("list") + if (known.containsKey(listid)) { + val ciobj = known.remove(listid)!! 
+ + db.checkInQueries.updateFromJson( + datetime = ISODateTimeFormat.dateTimeParser().parseDateTime(ci.getString("datetime")).toDate(), + json_data = ci.toString(), + listId = listid, + position = positionId, + type = ci.optString("type", "entry"), + id = ciobj.id, + ) + } else { + val ciobj = CheckIn( + id = -1, + datetime = ISODateTimeFormat.dateTimeParser().parseDateTime(ci.getString("datetime")).toDate(), + json_data = ci.toString(), + listId = listid, + position = positionId, + server_id = ci.optLong("id"), + type = ci.optString("type", "entry"), + ) + checkinCreateCache.add(ciobj) + } + } + if (known.size > 0) { + db.checkInQueries.deleteByIdList(known.values.map { it.id }) + } + + + // Images + if (jsonobj.has("pdf_data")) { + val pdfdata = jsonobj.getJSONObject("pdf_data") + if (pdfdata.has("images")) { + val images = pdfdata.getJSONObject("images") + updatePdfImages(db, fileStorage, api, positionServerId!!, images) + } + } + } + + override fun afterPage() { + super.afterPage() + + checkinCreateCache.forEach { + db.checkInQueries.insert( + datetime = it.datetime, + json_data = it.json_data, + listId = it.listId, + position = it.position, + server_id = it.server_id, + type = it.type, + ) + } + checkinCreateCache.clear() + } + + + override fun insert(jsonobj: JSONObject) { + val json_data = JSONObject(jsonobj.toString()) + json_data.remove("positions") + + val id = db.orderQueries.transactionWithResult { + db.orderQueries.insert( + checkin_attention = jsonobj.optBoolean("checkin_attention"), + checkin_text = jsonobj.optString("checkin_text"), + code = jsonobj.getString("code"), + deleteAfterTimestamp = 0L, + email = jsonobj.optString("email"), + event_slug = eventSlug, + json_data = json_data.toString(), + status = jsonobj.getString("status"), + valid_if_pending = jsonobj.optBoolean("valid_if_pending", false), + ) + db.compatQueries.getLastInsertedOrderId().executeAsOne() + } + + afterInsertOrUpdate(id, jsonobj) + } + + override fun update(obj: Order, 
jsonobj: JSONObject) { + val json_data = JSONObject(jsonobj.toString()) + json_data.remove("positions") + + db.orderQueries.updateFromJson( + checkin_attention = jsonobj.optBoolean("checkin_attention"), + checkin_text = jsonobj.optString("checkin_text"), + code = jsonobj.getString("code"), + deleteAfterTimestamp = 0L, + email = jsonobj.optString("email"), + event_slug = eventSlug, + json_data = json_data.toString(), + status = jsonobj.getString("status"), + valid_if_pending = jsonobj.optBoolean("valid_if_pending", false), + id = obj.id, + ) + + afterInsertOrUpdate(obj.id, jsonobj) + } + + private fun afterInsertOrUpdate(orderId: Long, jsonobj: JSONObject) { + val known: MutableMap = mutableMapOf() + + val allPos = db.orderPositionQueries.selectForOrder(orderId).executeAsList() + for (op in allPos) { + known[op.server_id!!] = op + } + + val posarray = jsonobj.getJSONArray("positions") + val posmap: MutableMap = java.util.HashMap() + for (i in 0 until posarray.length()) { + val posjson = posarray.getJSONObject(i) + posmap[posjson.getLong("id")] = posjson + } + for (i in 0 until posarray.length()) { + val posjson = posarray.getJSONObject(i) + posjson.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION) + posjson.put("__libpretixsync_syncCycleId", syncCycleId) + val jsonid = posjson.getLong("id") + var old: JSONObject? = null + var posobj: OrderPosition? = null + if (known.containsKey(jsonid)) { + posobj = known[jsonid] + old = posobj!!.json_data?.let { JSONObject(it) } + } + var parent: JSONObject? 
= null + if (!posjson.isNull("addon_to")) { + parent = posmap[posjson.getLong("addon_to")] + } + if (posobj != null) { + known.remove(jsonid) + if (!JSONUtils.similar(posjson, old)) { + updatePositionObject(posobj, posjson, orderId, jsonobj, parent) + } + } else { + insertPositionObject(posjson, orderId, jsonobj, parent) + } + } + if (known.size > 0) { + db.orderPositionQueries.deleteByServerIdList(known.values.map { it.server_id }) + } + } + + override fun deleteUnseen(): Boolean = false + + override fun downloadPage(url: String, isFirstPage: Boolean): JSONObject? { + if (isFirstPage) { + rlm = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = rlmName(), + event_slug = eventSlug, + ).executeAsOneOrNull() + } + var is_continued_fetch = false + var resUrl = url + if (!resUrl.contains("testmode=")) { + resUrl += if (resUrl.contains("?")) { + "&" + } else { + "?" + } + resUrl += "testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads" + if (!isPretixpos) { + resUrl += "&exclude=payments&exclude=refunds" + } + if (withPdfData) { + resUrl += "&pdf_data=true" + } + } + + val formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ") + val cutoff = DateTime().withZone(DateTimeZone.UTC).minus(Duration.standardDays(14)) + var firstrun_params = "" + try { + if (subEventId != null && subEventId > 0) { + firstrun_params = "&subevent_after=" + URLEncoder.encode(formatter.print(cutoff), "UTF-8") + } + } catch (e: UnsupportedEncodingException) { + e.printStackTrace() + } + + // On event series, we ignore orders that only affect subevents more than 14 days old. + // However, we can only do that on the first run, since we'd otherwise miss if e.g. an order + // that we have in our current database is changed to a date outside that time frame. + val resourceSyncStatus = rlm + if (resourceSyncStatus != null) { + // This resource has been fetched before. 
+ if (resourceSyncStatus.status != null && resourceSyncStatus.status.startsWith("incomplete:")) { + // Continuing an interrupted fetch + + // Ordering is crucial here: Only because the server returns the orders in the + // order of creation we can be sure that we don't miss orders created in between our + // paginated requests. + + is_continued_fetch = true + try { + if (!resUrl.contains("created_since")) { + resUrl += "&ordering=datetime&created_since=" + URLEncoder.encode(resourceSyncStatus.status.substring(11), "UTF-8") + firstrun_params + } + } catch (e: UnsupportedEncodingException) { + e.printStackTrace() + } + } else { + // Diff to last time + + // Ordering is crucial here: Only because the server returns the orders in the + // order of modification we can be sure that we don't miss orders created in between our + // paginated requests. If an order were to be modified between our fetch of page 1 + // and 2 that originally wasn't part of the result set, we won't see it (as it will + // be inserted on page 1), but we'll see it the next time, and we will see some + // duplicates on page 2, but we don't care. The important part is that nothing gets + // lost "between the pages". If an order of page 2 gets modified and moves to page + // one while we fetch page 2, again, we won't see it and we'll see some duplicates, + // but the next sync will fix it since we always fetch our diff compared to the time + // of the first page. 
+ + try { + if (!resUrl.contains("modified_since")) { + resUrl += "&ordering=-last_modified&modified_since=" + URLEncoder.encode(resourceSyncStatus.last_modified, "UTF-8") + } + } catch (e: UnsupportedEncodingException) { + e.printStackTrace() + } + } + } else { + if (!resUrl.contains("subevent_after")) { + resUrl += firstrun_params + } + } + + val apiResponse = api.fetchResource(resUrl) + if (isFirstPage && !is_continued_fetch) { + firstResponseTimestamp = apiResponse.response.header("X-Page-Generated") + } + val d = apiResponse.data + if (apiResponse.response.code == 200) { + try { + val res = d!!.getJSONArray("results") + if (res.length() > 0) { + lastOrderTimestamp = res.getJSONObject(res.length() - 1).getString("datetime") + } + } catch (e: JSONException) { + e.printStackTrace() + } + } + return d + } + + private fun getItem(id: Long): Item? { + if (itemCache.size == 0) { + val items = db.itemQueries.selectAll().executeAsList() + for (item in items) { + itemCache.put(item.server_id, item) + } + } + return itemCache[id] + } + + override fun queryKnownIDs(): MutableSet? = null + + override fun queryKnownObjects(ids: Set): MutableMap { + checkinCache.clear() + + if (ids.isEmpty()) { + return mutableMapOf() + } + + val allCheckins = db.checkInQueries.selectForOrders(ids).executeAsList() + + for (c in allCheckins) { + val pk = c.position!! + if (checkinCache.containsKey(pk)) { + checkinCache[pk]!!.add(c) + } else { + val l: MutableList = java.util.ArrayList() + l.add(c) + checkinCache[pk] = l + } + } + + return super.queryKnownObjects(ids) + } + + override fun runBatch(parameterBatch: List): List { + // pretix guarantees uniqueness of CODE within an organizer account, so we don't need + // to filter by EVENT_SLUG. 
This is good, because SQLite tends to build a very stupid + // query plan otherwise if statistics are not up to date (using the EVENT_SLUG index + // instead of using the CODE index) + return db.orderQueries.selectByCodeList(parameterBatch).executeAsList() + } + + override fun getKnownCount(): Long { + return db.orderQueries.countForEventSlug(eventSlug).executeAsOne() + } + + override fun getResourceName(): String { + return "orders" + } + + override fun getId(obj: JSONObject): String { + return obj.getString("code") + } + + override fun getId(obj: Order): String { + return obj.code!! + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.orderQueries.transaction(false, body) + } + + override fun getJSON(obj: Order): JSONObject { + return JSONObject(obj.json_data) + } + + override fun delete(key: String) { + db.orderQueries.deleteByCode(key) + } + + fun standaloneRefreshFromJSON(data: JSONObject) { + val order = db.orderQueries.selectByCode(data.getString("code")).executeAsOneOrNull() + + var old: JSONObject? 
= null + if (order?.id != null) { + old = JSONObject(order.json_data) + if (!old.has("positions")) { + val pos = JSONArray() + val positions = db.orderPositionQueries.selectForOrder(order.id).executeAsList() + for (p in positions) { + pos.put(JSONObject(p.json_data)) + } + old.put("positions", pos) + } + } + + // Warm up cache + val ids = mutableSetOf() + ids.add(data.getString("code")) + queryKnownObjects(ids) + // Store object + data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION) + data.put("__libpretixsync_syncCycleId", syncCycleId) + + if (order == null) { + insert(data) + } else { + if (!JSONUtils.similar(data, old)) { + update(order, data) + } + } + + for (c in checkinCreateCache) { + db.checkInQueries.insert( + datetime = c.datetime, + json_data = c.json_data, + listId = c.listId, + position = c.position, + server_id = c.server_id, + type = c.type, + ) + } + + checkinCreateCache.clear() + } + + companion object { + fun updatePdfImages(db: SyncDatabase, fileStorage: FileStorage, api: PretixApi, serverId: Long, images: JSONObject) { + val seen_etags: MutableSet = HashSet() + val it = images.keys() + while (it.hasNext()) { + val k = it.next() as String + val remote_filename = images.optString(k) + if (remote_filename == null || !remote_filename.startsWith("http")) { + continue + } + var etag = HashUtils.toSHA1(remote_filename.toByteArray()) + if (remote_filename.contains("#etag=")) { + etag = remote_filename.split("#etag=".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()[1] + } + val local_filename = "pdfimage_$etag.bin" + seen_etags.add(etag) + + if (!fileStorage.contains(local_filename)) { + try { + val file = api.downloadFile(remote_filename) + val os = fileStorage.writeStream(local_filename) + val `is` = file.response.body!!.byteStream() + val buffer = ByteArray(1444) + var byteread: Int + while ((`is`.read(buffer).also { byteread = it }) != -1) { + os.write(buffer, 0, byteread) + } + `is`.close() + os.close() + } catch (e: 
ApiException) { + // TODO: What to do? + e.printStackTrace() + } catch (e: IOException) { + // TODO: What to do? + e.printStackTrace() + fileStorage.delete(local_filename) + } + } + + var cpi = db.cachedPdfImageQueries.selectForOrderPositionAndKey( + order_position_server_id = serverId, + key = k, + ).executeAsOneOrNull() + + if (cpi == null) { + db.cachedPdfImageQueries.insert( + etag = etag, + key = k, + order_position_server_id = serverId, + ) + } else { + db.cachedPdfImageQueries.updateEtag( + etag = etag, + id = cpi.id, + ) + } + } + + db.cachedPdfImageQueries.deleteUnseen( + order_position_server_id = serverId, + seen_etags = seen_etags, + ) + } + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuestionSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuestionSyncAdapter.java deleted file mode 100644 index 5a54a2d5..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuestionSyncAdapter.java +++ /dev/null @@ -1,107 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.Item; -import eu.pretix.libpretixsync.db.Migrations; -import eu.pretix.libpretixsync.db.Question; -import eu.pretix.libpretixsync.utils.JSONUtils; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class QuestionSyncAdapter extends BaseConditionalSyncAdapter { - - public QuestionSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - } - - @Override - public void updateObject(Question obj, JSONObject 
jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - obj.setServer_id(jsonobj.getLong("id")); - obj.setPosition(jsonobj.getLong("position")); - obj.setRequired(jsonobj.optBoolean("required", false)); - obj.setJson_data(jsonobj.toString()); - JSONArray itemsarr = jsonobj.getJSONArray("items"); - List itemids = new ArrayList<>(); - for (int i = 0; i < itemsarr.length(); i++) { - itemids.add(itemsarr.getLong(i)); - } - List items = store.select(Item.class).where( - Item.SERVER_ID.in(itemids) - ).get().toList(); - for (Item item : items) { - if (!obj.getItems().contains(item)) { - obj.getItems().add(item); - } - } - obj.getItems().retainAll(items); - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(Question.class) - .where(Question.EVENT_SLUG.eq(eventSlug)) - .and(Question.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(Question.SERVER_ID) - .where(Question.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - String getResourceName() { - return "questions"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(Question obj) { - return obj.getServer_id(); - } - - @Override - Question newEmptyObject() { - return new Question(); - } - - public void standaloneRefreshFromJSON(JSONObject data) throws JSONException { - Question obj = store.select(Question.class) - .where(Question.SERVER_ID.eq(data.getLong("id"))) - .get().firstOr(newEmptyObject()); - JSONObject old = null; - if (obj.getId() != null) { - old = obj.getJSON(); - } - - // Store object - data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION); - data.put("__libpretixsync_syncCycleId", syncCycleId); - if (old == null) { - updateObject(obj, data); - store.insert(obj); - } else { - if (!JSONUtils.similar(data, old)) { - updateObject(obj, data); - store.update(obj); - } - } - } -} diff --git 
a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuestionSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuestionSyncAdapter.kt new file mode 100644 index 00000000..16b673f6 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuestionSyncAdapter.kt @@ -0,0 +1,150 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.db.Migrations +import eu.pretix.libpretixsync.sqldelight.Question +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import eu.pretix.libpretixsync.utils.JSONUtils +import org.json.JSONException +import org.json.JSONObject + +class QuestionSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, +) : BaseConditionalSyncAdapter( + db = db, + fileStorage = fileStorage, + eventSlug = eventSlug, + api = api, + syncCycleId = syncCycleId, + feedback = feedback, +) { + + override fun getResourceName(): String = "questions" + + override fun getId(obj: Question): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: Question): JSONObject = JSONObject(obj.json_data!!) 
+ + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.questionQueries.selectServerIdsByEventSlug(eventSlug).execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + val questionId = db.questionQueries.transactionWithResult { + db.questionQueries.insert( + event_slug = eventSlug, + json_data = jsonobj.toString(), + position = jsonobj.getLong("position"), + required = jsonobj.optBoolean("required", false), + server_id = jsonobj.getLong("id"), + ) + db.compatQueries.getLastInsertedQuestionId().executeAsOne() + } + + upsertItemRelations(questionId, emptySet(), jsonobj) + } + + override fun update(obj: Question, jsonobj: JSONObject) { + val existingRelations = db.questionQueries.selectRelationsForQuestion(obj.id) + .executeAsList() + .map { + // Not-null assertion needed for SQLite + it.ItemId!! 
+ } + .toSet() + + db.questionQueries.updateFromJson( + event_slug = eventSlug, + json_data = jsonobj.toString(), + position = jsonobj.getLong("position"), + required = jsonobj.optBoolean("required", false), + id = obj.id, + ) + + upsertItemRelations(obj.id, existingRelations, jsonobj) + } + + private fun upsertItemRelations(questionId: Long, existingIds: Set, jsonobj: JSONObject) { + val itemsarr = jsonobj.getJSONArray("items") + val itemids = ArrayList(itemsarr.length()) + for (i in 0 until itemsarr.length()) { + itemids.add(itemsarr.getLong(i)) + } + val newIds = if (itemids.isNotEmpty()) { + db.itemQueries.selectByServerIdListAndEventSlug( + server_id = itemids, + event_slug = eventSlug, + ).executeAsList().map { it.id }.toSet() + } else { + emptySet() + } + + for (newId in newIds - existingIds) { + db.questionQueries.insertItemRelation( + item_id = newId, + question_id = questionId, + ) + } + for (oldId in existingIds - newIds) { + db.questionQueries.deleteItemRelation( + item_id = oldId, + question_id = questionId, + ) + } + } + + override fun delete(key: Long) { + val question = db.questionQueries.selectByServerId(key).executeAsOne() + db.questionQueries.deleteItemRelationsForQuestion(question.id) + db.questionQueries.deleteByServerId(key) + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.questionQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.questionQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + ).executeAsList() + + @Throws(JSONException::class) + fun standaloneRefreshFromJSON(data: JSONObject) { + val known = db.questionQueries.selectByServerId(data.getLong("id")).executeAsOneOrNull() + + // Store object + data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION) + data.put("__libpretixsync_syncCycleId", syncCycleId) + if (known == null) { + insert(data) + } else { + val old = JSONObject(known.json_data!!) 
+ if (!JSONUtils.similar(data, old)) { + update(known, data) + } + } + } + +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuotaSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuotaSyncAdapter.java deleted file mode 100644 index 001f9871..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuotaSyncAdapter.java +++ /dev/null @@ -1,106 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.ArrayList; -import java.util.List; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.Item; -import eu.pretix.libpretixsync.db.Quota; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class QuotaSyncAdapter extends BaseDownloadSyncAdapter { - private Long subeventId; - - public QuotaSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback, Long subeventId) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - this.subeventId = subeventId; - } - - protected String getUrl() { - String url = api.eventResourceUrl(eventSlug, getResourceName()); - url += "?with_availability=true"; - if (this.subeventId != null && this.subeventId > 0L) { - url += "&subevent=" + this.subeventId; - } - return url; - } - - @Override - public void updateObject(Quota obj, JSONObject jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - obj.setServer_id(jsonobj.getLong("id")); - obj.setSubevent_id(jsonobj.optLong("subevent")); - obj.setJson_data(jsonobj.toString()); - obj.setSize(jsonobj.isNull("size") ? null : jsonobj.getLong("size")); - if (jsonobj.has("available")) { - obj.setAvailable(jsonobj.getBoolean("available") ? 
1L : 0L); - obj.setAvailable_number(jsonobj.isNull("available_number") ? null : jsonobj.getLong("available_number")); - } else { - obj.setAvailable(null); - obj.setAvailable_number(null); - } - JSONArray itemsarr = jsonobj.getJSONArray("items"); - List itemids = new ArrayList<>(); - for (int i = 0; i < itemsarr.length(); i++) { - itemids.add(itemsarr.getLong(i)); - } - List items = store.select(Item.class).where( - Item.SERVER_ID.in(itemids) - ).get().toList(); - for (Item item : items) { - if (!obj.getItems().contains(item)) { - obj.getItems().add(item); - } - } - obj.getItems().retainAll(items); - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(Quota.class) - .where(Quota.EVENT_SLUG.eq(eventSlug)) - .and(Quota.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - if (subeventId != null && subeventId > 0L) { - return store.select(Quota.SERVER_ID) - .where(Quota.EVENT_SLUG.eq(eventSlug)) - .and(Quota.SUBEVENT_ID.eq(subeventId)) - .get().iterator(); - } else { - return store.select(Quota.SERVER_ID) - .where(Quota.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - } - - @Override - String getResourceName() { - return "quotas"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(Quota obj) { - return obj.getServer_id(); - } - - @Override - Quota newEmptyObject() { - return new Quota(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuotaSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuotaSyncAdapter.kt new file mode 100644 index 00000000..b5e45028 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/QuotaSyncAdapter.kt @@ -0,0 +1,179 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.PretixApi +import 
eu.pretix.libpretixsync.sqldelight.Quota +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.json.JSONObject + +class QuotaSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, + private val subeventId: Long?, +) : BaseDownloadSyncAdapter( + db = db, + api = api, + syncCycleId = syncCycleId, + eventSlug = eventSlug, + fileStorage = fileStorage, + feedback = feedback, +) { + override fun getResourceName(): String = "quotas" + + override fun getUrl(): String { + var url = api.eventResourceUrl(eventSlug, getResourceName()) + url += "?with_availability=true" + if (this.subeventId != null && this.subeventId > 0L) { + url += "&subevent=" + this.subeventId + } + return url + } + + override fun getId(obj: Quota): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: Quota): JSONObject = JSONObject(obj.json_data!!) 
+ + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + + if (subeventId != null && subeventId > 0L) { + db.quotaQueries.selectServerIdsByEventSlugAndSubEvent( + event_slug = eventSlug, + subevent_id = subeventId, + ).execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + } else { + db.quotaQueries.selectServerIdsByEventSlug(eventSlug).execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + val (available, availableNumber) = if (jsonobj.has("available")) { + val available = if (jsonobj.getBoolean("available")) true else false + val number = + if (jsonobj.isNull("available_number")) null else jsonobj.getLong("available_number") + + Pair(available, number) + } else { + Pair(null, null) + } + + val quotaId = db.quotaQueries.transactionWithResult { + db.quotaQueries.insert( + available = available, + available_number = availableNumber, + event_slug = eventSlug, + json_data = jsonobj.toString(), + server_id = jsonobj.getLong("id"), + size = if (jsonobj.isNull("size")) null else jsonobj.getLong("size"), + subevent_id = jsonobj.optLong("subevent"), + ) + + db.compatQueries.getLastInsertedQuotaId().executeAsOne() + } + + upsertItemRelations(quotaId, emptySet(), jsonobj) + } + + override fun update(obj: Quota, jsonobj: JSONObject) { + val existingRelations = db.quotaQueries.selectRelationsForQuota(obj.id) + .executeAsList() + .map { + // Not-null assertion needed for SQLite + it.ItemId!! 
+ } + .toSet() + + val (available, availableNumber) = if (jsonobj.has("available")) { + val available = if (jsonobj.getBoolean("available")) true else false + val number = + if (jsonobj.isNull("available_number")) null else jsonobj.getLong("available_number") + + Pair(available, number) + } else { + Pair(null, null) + } + + db.quotaQueries.updateFromJson( + available = available, + available_number = availableNumber, + event_slug = eventSlug, + json_data = jsonobj.toString(), + size = if (jsonobj.isNull("size")) null else jsonobj.getLong("size"), + subevent_id = jsonobj.optLong("subevent"), + id = obj.id, + ) + + upsertItemRelations(obj.id, existingRelations, jsonobj) + } + + private fun upsertItemRelations(quotaId: Long, existingIds: Set, jsonobj: JSONObject) { + val itemsarr = jsonobj.getJSONArray("items") + val itemids = ArrayList(itemsarr.length()) + for (i in 0 until itemsarr.length()) { + itemids.add(itemsarr.getLong(i)) + } + val newIds = if (itemids.isNotEmpty()) { + db.itemQueries.selectByServerIdListAndEventSlug( + server_id = itemids, + event_slug = eventSlug, + ).executeAsList().map { it.id }.toSet() + } else { + emptySet() + } + + for (newId in newIds - existingIds) { + db.quotaQueries.insertItemRelation( + item_id = newId, + quota_id = quotaId, + ) + } + for (oldId in existingIds - newIds) { + db.quotaQueries.deleteItemRelation( + item_id = oldId, + quota_id = quotaId, + ) + } + } + + override fun delete(key: Long) { + val quota = db.quotaQueries.selectByServerId(key).executeAsOne() + db.quotaQueries.deleteItemRelationsForQuota(quota.id) + db.quotaQueries.deleteByServerId(key) + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) = + db.quotaQueries.transaction(false, body) + + override fun runBatch(parameterBatch: List): List = + db.quotaQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + ).executeAsList() + +} diff --git 
a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ReusableMediaSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ReusableMediaSyncAdapter.java deleted file mode 100644 index bb3d9b5f..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ReusableMediaSyncAdapter.java +++ /dev/null @@ -1,230 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.Duration; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ExecutionException; - -import eu.pretix.libpretixsync.api.ApiException; -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.api.ResourceNotModified; -import eu.pretix.libpretixsync.db.CachedPdfImage; -import eu.pretix.libpretixsync.db.CheckIn; -import eu.pretix.libpretixsync.db.Item; -import eu.pretix.libpretixsync.db.Migrations; -import eu.pretix.libpretixsync.db.Order; -import eu.pretix.libpretixsync.db.OrderPosition; -import eu.pretix.libpretixsync.db.ResourceSyncStatus; -import eu.pretix.libpretixsync.db.ReusableMedium; -import eu.pretix.libpretixsync.utils.HashUtils; -import eu.pretix.libpretixsync.utils.JSONUtils; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class ReusableMediaSyncAdapter extends BaseDownloadSyncAdapter { - public 
ReusableMediaSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCylceId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, "__all__", api, syncCylceId, feedback); - } - - private String firstResponseTimestamp; - private String lastMediumTimestamp; - private ResourceSyncStatus rlm; - - private String rlmName() { - return "reusablemedia"; - } - - @Override - protected String getUrl() { - return api.organizerResourceUrl(getResourceName()); - } - - @Override - public void download() throws JSONException, ApiException, ExecutionException, InterruptedException { - boolean completed = false; - try { - super.download(); - completed = true; - } finally { - ResourceSyncStatus resourceSyncStatus = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(rlmName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq("__all__")) - .limit(1) - .get().firstOrNull(); - - // We need to cache the response timestamp of the *first* page in the result set to make - // sure we don't miss anything between this and the next run. - // - // If the download failed, completed will be false. In case this was a full fetch - // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be - // able to continue properly. 
- if (firstResponseTimestamp != null) { - if (resourceSyncStatus == null) { - resourceSyncStatus = new ResourceSyncStatus(); - resourceSyncStatus.setResource(rlmName()); - resourceSyncStatus.setEvent_slug("__all__"); - if (completed) { - resourceSyncStatus.setStatus("complete"); - } else { - resourceSyncStatus.setStatus("incomplete:" + lastMediumTimestamp); - } - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } else { - if (completed) { - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } - } - } else if (completed && resourceSyncStatus != null) { - resourceSyncStatus.setStatus("complete"); - store.update(resourceSyncStatus); - } else if (!completed && lastMediumTimestamp != null && resourceSyncStatus != null) { - resourceSyncStatus.setStatus("incomplete:" + lastMediumTimestamp); - store.update(resourceSyncStatus); - } - lastMediumTimestamp = null; - firstResponseTimestamp = null; - } - } - - @Override - public void updateObject(ReusableMedium obj, JSONObject jsonobj) throws JSONException { - obj.setServer_id(jsonobj.getLong("id")); - obj.setType(jsonobj.getString("type")); - obj.setIdentifier(jsonobj.getString("identifier")); - obj.setActive(jsonobj.getBoolean("active")); - obj.setExpires(jsonobj.optString("expires")); - obj.setCustomer_id(jsonobj.optLong("customer")); - obj.setLinked_giftcard_id(jsonobj.optLong("linked_giftcard")); - obj.setLinked_orderposition_id(jsonobj.optLong("linked_orderposition")); - obj.setJson_data(jsonobj.toString()); - } - - @Override - protected boolean deleteUnseen() { - return false; - } - - @Override - protected JSONObject downloadPage(String url, boolean isFirstPage) throws ApiException, ResourceNotModified { - if (isFirstPage) { - rlm = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(rlmName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq("__all__")) - .limit(1) - .get().firstOrNull(); - } - boolean 
is_continued_fetch = false; - - if (rlm != null) { - // This resource has been fetched before. - if (rlm.getStatus() != null && rlm.getStatus().startsWith("incomplete:")) { - // Continuing an interrupted fetch - - // Ordering is crucial here: Only because the server returns the orders in the - // order of creation we can be sure that we don't miss orders created in between our - // paginated requests. - is_continued_fetch = true; - try { - if (!url.contains("created_since")) { - url += "?ordering=datetime&created_since=" + URLEncoder.encode(rlm.getStatus().substring(11), "UTF-8"); - } - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - } else { - // Diff to last time - - // Ordering is crucial here: Only because the server returns the media in the - // order of modification we can be sure that we don't miss media created in between our - // paginated requests. If a medium were to be modified between our fetch of page 1 - // and 2 that originally wasn't part of the result set, we won't see it (as it will - // be inserted on page 1), but we'll see it the next time, and we will se some - // duplicates on page 2, but we don't care. The important part is that nothing gets - // lost "between the pages". If a medium of page 2 gets modified and moves to page - // one while we fetch page 2, again, we won't see it and we'll see some duplicates, - // but the next sync will fix it since we always fetch our diff compared to the time - // of the first page. 
- try { - if (!url.contains("updated_since")) { - url += "?ordering=-updated&updated_since=" + URLEncoder.encode(rlm.getLast_modified(), "UTF-8"); - } - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - } - } - - PretixApi.ApiResponse apiResponse = api.fetchResource(url); - if (isFirstPage && !is_continued_fetch) { - firstResponseTimestamp = apiResponse.getResponse().header("X-Page-Generated"); - } - JSONObject d = apiResponse.getData(); - if (apiResponse.getResponse().code() == 200) { - try { - JSONArray res = d.getJSONArray("results"); - if (res.length() > 0) { - lastMediumTimestamp = res.getJSONObject(res.length() - 1).getString("created"); - } - } catch (JSONException e) { - e.printStackTrace(); - } - } - return d; - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(ReusableMedium.class) - .where(ReusableMedium.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(ReusableMedium.SERVER_ID) - .get().iterator(); - } - - @Override - String getResourceName() { - return "reusablemedia"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(ReusableMedium obj) { - return obj.getServer_id(); - } - - @Override - ReusableMedium newEmptyObject() { - return new ReusableMedium(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ReusableMediaSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ReusableMediaSyncAdapter.kt new file mode 100644 index 00000000..1b45e9cd --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/ReusableMediaSyncAdapter.kt @@ -0,0 +1,245 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.ApiException +import eu.pretix.libpretixsync.api.PretixApi +import 
eu.pretix.libpretixsync.api.ResourceNotModified +import eu.pretix.libpretixsync.sqldelight.ResourceSyncStatus +import eu.pretix.libpretixsync.sqldelight.ReusableMedium +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.json.JSONException +import org.json.JSONObject +import java.io.UnsupportedEncodingException +import java.net.URLEncoder +import java.util.concurrent.ExecutionException + +class ReusableMediaSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, +) : BaseDownloadSyncAdapter( + db = db, + api = api, + syncCycleId = syncCycleId, + eventSlug = "__all__", + fileStorage = fileStorage, + feedback = feedback, +) { + + private var firstResponseTimestamp: String? = null + private var lastMediumTimestamp: String? = null + private var rlm: ResourceSyncStatus? = null + + override fun getResourceName(): String = "reusablemedia" + + private fun rlmName(): String = "reusablemedia" + + override fun getUrl(): String = api.organizerResourceUrl(getResourceName()) + + override fun getId(obj: ReusableMedium): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: ReusableMedium): JSONObject = JSONObject(obj.json_data!!) 
+ + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.reusableMediumQueries.selectServerIds().execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + db.reusableMediumQueries.insert( + active = jsonobj.getBoolean("active"), + customer_id = jsonobj.optLong("customer"), + expires = jsonobj.optString("expires"), + identifier = jsonobj.getString("identifier"), + json_data = jsonobj.toString(), + linked_giftcard_id = jsonobj.optLong("linked_giftcard"), + linked_orderposition_id = jsonobj.optLong("linked_orderposition"), + server_id = jsonobj.getLong("id"), + type = jsonobj.getString("type"), + ) + } + + override fun update(obj: ReusableMedium, jsonobj: JSONObject) { + db.reusableMediumQueries.updateFromJson( + active = jsonobj.getBoolean("active"), + customer_id = jsonobj.optLong("customer"), + expires = jsonobj.optString("expires"), + identifier = jsonobj.getString("identifier"), + json_data = jsonobj.toString(), + linked_giftcard_id = jsonobj.optLong("linked_giftcard"), + linked_orderposition_id = jsonobj.optLong("linked_orderposition"), + type = jsonobj.getString("type"), + id = obj.id, + ) + } + + override fun delete(key: Long) { + db.reusableMediumQueries.deleteByServerId(key) + } + + override fun deleteUnseen(): Boolean { + return false + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.reusableMediumQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.reusableMediumQueries.selectByServerIdList(parameterBatch).executeAsList() + + @Throws( + JSONException::class, + ApiException::class, + ExecutionException::class, + InterruptedException::class + ) + override fun download() { + var completed = false + try { + super.download() + completed = true + } finally { + val 
resourceSyncStatus = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = rlmName(), + event_slug = "__all__", + ).executeAsOneOrNull() + + // We need to cache the response timestamp of the *first* page in the result set to make + // sure we don't miss anything between this and the next run. + // + // If the download failed, completed will be false. In case this was a full fetch + // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be + // able to continue properly. + if (firstResponseTimestamp != null) { + if (resourceSyncStatus == null) { + val status = if (completed) { + "complete" + } else { + "incomplete:$lastMediumTimestamp" + } + + db.resourceSyncStatusQueries.insert( + event_slug = "__all__", + last_modified = firstResponseTimestamp, + meta = null, + resource = rlmName(), + status = status, + ) + + } else { + if (completed) { + db.resourceSyncStatusQueries.updateLastModified( + last_modified = firstResponseTimestamp, + id = resourceSyncStatus.id, + ) + } + } + } else if (completed && resourceSyncStatus != null) { + db.resourceSyncStatusQueries.updateStatus( + status = "complete", + id = resourceSyncStatus.id, + ) + } else if (!completed && lastMediumTimestamp != null && resourceSyncStatus != null) { + db.resourceSyncStatusQueries.updateStatus( + status = "incomplete:$lastMediumTimestamp", + id = resourceSyncStatus.id, + ) + } + lastMediumTimestamp = null + firstResponseTimestamp = null + } + } + + @Throws(ApiException::class, ResourceNotModified::class) + override fun downloadPage(url: String, isFirstPage: Boolean): JSONObject? { + if (isFirstPage) { + rlm = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = rlmName(), + event_slug = "__all__", + ).executeAsOneOrNull() + } + var is_continued_fetch = false + + var resUrl = url + rlm?.let { + // This resource has been fetched before. 
+ if (it.status != null && it.status.startsWith("incomplete:")) { + // Continuing an interrupted fetch + + // Ordering is crucial here: Only because the server returns the orders in the + // order of creation we can be sure that we don't miss orders created in between our + // paginated requests. + + is_continued_fetch = true + try { + if (!resUrl.contains("created_since")) { + resUrl += "?ordering=datetime&created_since=" + URLEncoder.encode( + it.status.substring(11), "UTF-8" + ) + } + } catch (e: UnsupportedEncodingException) { + e.printStackTrace() + } + } else { + // Diff to last time + + // Ordering is crucial here: Only because the server returns the media in the + // order of modification we can be sure that we don't miss media created in between our + // paginated requests. If a medium were to be modified between our fetch of page 1 + // and 2 that originally wasn't part of the result set, we won't see it (as it will + // be inserted on page 1), but we'll see it the next time, and we will see some + // duplicates on page 2, but we don't care. The important part is that nothing gets + // lost "between the pages". If a medium of page 2 gets modified and moves to page + // one while we fetch page 2, again, we won't see it and we'll see some duplicates, + // but the next sync will fix it since we always fetch our diff compared to the time + // of the first page. 
+ + try { + if (!resUrl.contains("updated_since")) { + resUrl += "?ordering=-updated&updated_since=" + URLEncoder.encode( + it.last_modified, + "UTF-8" + ) + } + } catch (e: UnsupportedEncodingException) { + e.printStackTrace() + } + } + } + + val apiResponse = api.fetchResource(resUrl) + if (isFirstPage && !is_continued_fetch) { + firstResponseTimestamp = apiResponse.response.header("X-Page-Generated") + } + val d = apiResponse.data + if (apiResponse.response.code == 200) { + try { + val res = d!!.getJSONArray("results") + if (res.length() > 0) { + lastMediumTimestamp = res.getJSONObject(res.length() - 1).getString("created") + } + } catch (e: JSONException) { + e.printStackTrace() + } + } + return d + } + +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/RevokedTicketSecretSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/RevokedTicketSecretSyncAdapter.java deleted file mode 100644 index 359f0b7e..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/RevokedTicketSecretSyncAdapter.java +++ /dev/null @@ -1,176 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.util.List; -import java.util.concurrent.ExecutionException; - -import eu.pretix.libpretixsync.api.ApiException; -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.api.ResourceNotModified; -import eu.pretix.libpretixsync.db.ResourceSyncStatus; -import eu.pretix.libpretixsync.db.RevokedTicketSecret; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class RevokedTicketSecretSyncAdapter extends BaseDownloadSyncAdapter { - private String firstResponseTimestamp; - private ResourceSyncStatus rlm; - - public 
RevokedTicketSecretSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - } - - @Override - public void download() throws JSONException, ApiException, ExecutionException, InterruptedException { - boolean completed = false; - try { - super.download(); - completed = true; - } finally { - ResourceSyncStatus resourceSyncStatus = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(getResourceName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq(eventSlug)) - .limit(1) - .get().firstOrNull(); - - // We need to cache the response timestamp of the *first* page in the result set to make - // sure we don't miss anything between this and the next run. - // - // If the download failed, completed will be false. In case this was a full fetch - // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be - // able to continue properly. 
- if (firstResponseTimestamp != null) { - if (resourceSyncStatus == null) { - resourceSyncStatus = new ResourceSyncStatus(); - resourceSyncStatus.setResource(getResourceName()); - resourceSyncStatus.setEvent_slug(eventSlug); - if (completed) { - resourceSyncStatus.setStatus("complete"); - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } - } else { - if (completed) { - resourceSyncStatus.setLast_modified(firstResponseTimestamp); - store.upsert(resourceSyncStatus); - } - } - } else if (completed && resourceSyncStatus != null) { - resourceSyncStatus.setStatus("complete"); - store.update(resourceSyncStatus); - } - firstResponseTimestamp = null; - } - } - - protected boolean deleteUnseen() { - return rlm == null; - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(RevokedTicketSecret.SERVER_ID) - .where(RevokedTicketSecret.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - public void updateObject(RevokedTicketSecret obj, JSONObject jsonobj) throws JSONException { - obj.setServer_id(jsonobj.getLong("id")); - obj.setCreated(jsonobj.getString("created")); - obj.setSecret(jsonobj.getString("secret")); - obj.setJson_data(jsonobj.toString()); - } - - @Override - protected String getUrl() { - return api.eventResourceUrl(eventSlug, getResourceName()); - } - - @Override - String getResourceName() { - return "revokedsecrets"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(RevokedTicketSecret obj) { - return obj.getServer_id(); - } - - @Override - RevokedTicketSecret newEmptyObject() { - return new RevokedTicketSecret(); - } - - @Override - public CloseableIterator runBatch(List parameterBatch) { - return store.select(RevokedTicketSecret.class) - .where(RevokedTicketSecret.SERVER_ID.in(parameterBatch)) - .and(RevokedTicketSecret.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - 
protected JSONObject downloadPage(String url, boolean isFirstPage) throws ApiException, ResourceNotModified { - if (isFirstPage) { - rlm = store.select(ResourceSyncStatus.class) - .where(ResourceSyncStatus.RESOURCE.eq(getResourceName())) - .and(ResourceSyncStatus.EVENT_SLUG.eq(eventSlug)) - .limit(1) - .get().firstOrNull(); - } - - if (rlm != null) { - // This resource has been fetched before. - // Diff to last time - - // Ordering is crucial here: Only because the server returns the objects in the - // order of modification we can be sure that we don't miss orders created in between our - // paginated requests. If an object were to be modified between our fetch of page 1 - // and 2 that originally wasn't part of the result set, we won't see it (as it will - // be inserted on page 1), but we'll see it the next time, and we will see some - // duplicates on page 2, but we don't care. The important part is that nothing gets - // lost "between the pages". If an order of page 2 gets modified and moves to page - // one while we fetch page 2, again, we won't see it and we'll see some duplicates, - // but the next sync will fix it since we always fetch our diff compared to the time - // of the first page. 
- try { - if (!url.contains("created_since")) { - if (url.contains("?")) { - url += "&"; - } else { - url += "?"; - } - url += "ordering=-created&created_since=" + URLEncoder.encode(rlm.getLast_modified(), "UTF-8"); - } - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - } - - PretixApi.ApiResponse apiResponse = api.fetchResource(url); - if (isFirstPage) { - try { - JSONArray results = apiResponse.getData().getJSONArray("results"); - if (results.length() > 0) { - firstResponseTimestamp = results.getJSONObject(0).getString("created"); - } - } catch (JSONException | NullPointerException e) { - e.printStackTrace(); - } - } - return apiResponse.getData(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/RevokedTicketSecretSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/RevokedTicketSecretSyncAdapter.kt new file mode 100644 index 00000000..d3e692d9 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/RevokedTicketSecretSyncAdapter.kt @@ -0,0 +1,210 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.ApiException +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.api.ResourceNotModified +import eu.pretix.libpretixsync.sqldelight.ResourceSyncStatus +import eu.pretix.libpretixsync.sqldelight.RevokedTicketSecret +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.json.JSONException +import org.json.JSONObject +import java.io.UnsupportedEncodingException +import java.net.URLEncoder +import java.util.concurrent.ExecutionException + +class RevokedTicketSecretSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, +) : BaseDownloadSyncAdapter( + db = db, + 
api = api, + syncCycleId = syncCycleId, + eventSlug = eventSlug, + fileStorage = fileStorage, + feedback = feedback, +) { + + private var firstResponseTimestamp: String? = null + private var rlm: ResourceSyncStatus? = null + + override fun getResourceName(): String = "revokedsecrets" + + override fun getUrl(): String = api.eventResourceUrl(eventSlug, getResourceName()) + + override fun getId(obj: RevokedTicketSecret): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: RevokedTicketSecret): JSONObject = JSONObject(obj.json_data!!) + + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.revokedTicketSecretQueries.selectServerIdsByEventSlug(eventSlug).execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + db.revokedTicketSecretQueries.insert( + created = jsonobj.getString("created"), + event_slug = eventSlug, + json_data = jsonobj.toString(), + secret = jsonobj.getString("secret"), + server_id = jsonobj.getLong("id"), + ) + } + + override fun update(obj: RevokedTicketSecret, jsonobj: JSONObject) { + db.revokedTicketSecretQueries.updateFromJson( + created = jsonobj.getString("created"), + event_slug = eventSlug, + json_data = jsonobj.toString(), + secret = jsonobj.getString("secret"), + id = obj.id, + ) + } + + override fun delete(key: Long) { + db.revokedTicketSecretQueries.deleteByServerId(key) + } + + override fun deleteUnseen(): Boolean { + return rlm == null + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.revokedTicketSecretQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.revokedTicketSecretQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + 
).executeAsList() + + @Throws( + JSONException::class, + ApiException::class, + ExecutionException::class, + InterruptedException::class + ) + override fun download() { + var completed = false + try { + super.download() + completed = true + } finally { + val resourceSyncStatus = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = getResourceName(), + event_slug = eventSlug, + ).executeAsOneOrNull() + + // We need to cache the response timestamp of the *first* page in the result set to make + // sure we don't miss anything between this and the next run. + // + // If the download failed, completed will be false. In case this was a full fetch + // (i.e. no timestamp was stored beforehand) we will still store the timestamp to be + // able to continue properly. + if (firstResponseTimestamp != null) { + if (resourceSyncStatus == null) { + if (completed) { + db.resourceSyncStatusQueries.insert( + event_slug = eventSlug, + last_modified = firstResponseTimestamp, + meta = null, + resource = getResourceName(), + status = "complete" + ) + } + } else { + if (completed) { + db.resourceSyncStatusQueries.updateLastModified( + last_modified = firstResponseTimestamp, + id = resourceSyncStatus.id, + ) + } + } + } else if (completed && resourceSyncStatus != null) { + db.resourceSyncStatusQueries.updateStatus( + status = "complete", + id = resourceSyncStatus.id, + ) + } + firstResponseTimestamp = null + } + } + + @Throws(ApiException::class, ResourceNotModified::class) + override fun downloadPage(url: String, isFirstPage: Boolean): JSONObject? { + + if (isFirstPage) { + rlm = db.resourceSyncStatusQueries.selectByResourceAndEventSlug( + resource = getResourceName(), + event_slug = eventSlug, + ).executeAsOneOrNull() + } + + var resUrl = url + rlm?.let { + // This resource has been fetched before. 
+ // Diff to last time + + // Ordering is crucial here: Only because the server returns the objects in the + // order of modification we can be sure that we don't miss orders created in between our + // paginated requests. If an object were to be modified between our fetch of page 1 + // and 2 that originally wasn't part of the result set, we won't see it (as it will + // be inserted on page 1), but we'll see it the next time, and we will see some + // duplicates on page 2, but we don't care. The important part is that nothing gets + // lost "between the pages". If an order of page 2 gets modified and moves to page + // one while we fetch page 2, again, we won't see it and we'll see some duplicates, + // but the next sync will fix it since we always fetch our diff compared to the time + // of the first page. + + try { + if (!resUrl.contains("created_since")) { + resUrl += if (resUrl.contains("?")) { + "&" + } else { + "?" + } + resUrl += "ordering=-created&created_since=" + URLEncoder.encode( + it.last_modified, + "UTF-8" + ) + } + } catch (e: UnsupportedEncodingException) { + e.printStackTrace() + } + } + + val apiResponse = api.fetchResource(resUrl) + if (isFirstPage) { + try { + val results = apiResponse.data!!.getJSONArray("results") + if (results.length() > 0) { + firstResponseTimestamp = results.getJSONObject(0).getString("created") + } + } catch (e: JSONException) { + e.printStackTrace() + } catch (e: NullPointerException) { + e.printStackTrace() + } + } + return apiResponse.data + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SettingsSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SettingsSyncAdapter.java deleted file mode 100644 index 95dc2132..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SettingsSyncAdapter.java +++ /dev/null @@ -1,65 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - 
-import java.util.ArrayList; -import java.util.List; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.Settings; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; - -public class SettingsSyncAdapter extends BaseSingleObjectSyncAdapter { - - public SettingsSyncAdapter(BlockingEntityStore store, String eventSlug, String key, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, eventSlug, key, api, syncCycleId, feedback); - } - - @Override - Settings getKnownObject() { - List is = store.select(Settings.class) - .where(Settings.SLUG.eq(eventSlug)) - .get().toList(); - if (is.size() == 0) { - return null; - } else if (is.size() == 1) { - return is.get(0); - } else { - // What's going on here? Let's delete and re-fetch - store.delete(is); - return null; - } - } - - @Override - public void updateObject(Settings obj, JSONObject jsonobj) throws JSONException { - obj.setSlug(eventSlug); - obj.setName(jsonobj.optString("invoice_address_from_name")); - obj.setAddress(jsonobj.optString("invoice_address_from")); - obj.setZipcode(jsonobj.optString("invoice_address_from_zipcode")); - obj.setCity(jsonobj.optString("invoice_address_from_city")); - obj.setCountry(jsonobj.optString("invoice_address_from_country")); - obj.setTax_id(jsonobj.optString("invoice_address_from_tax_id")); - obj.setVat_id(jsonobj.optString("invoice_address_from_vat_id")); - obj.setPretixpos_additional_receipt_text(jsonobj.optString("pretixpos_additional_receipt_text")); - obj.setJson_data(jsonobj.toString()); - } - - @Override - protected String getUrl() { - return api.eventResourceUrl(eventSlug, "settings"); - } - - @Override - String getResourceName() { - return "settings"; - } - - @Override - Settings newEmptyObject() { - return new Settings(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SettingsSyncAdapter.kt 
b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SettingsSyncAdapter.kt new file mode 100644 index 00000000..0fb1ec0f --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SettingsSyncAdapter.kt @@ -0,0 +1,78 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.sqldelight.Settings +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.json.JSONObject + +open class SettingsSyncAdapter( + db: SyncDatabase, + eventSlug: String, + key: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback? = null, +) : BaseSingleObjectSyncAdapter( + db = db, + eventSlug = eventSlug, + key = key, + api = api, + syncCycleId = syncCycleId, + feedback = feedback, +) { + override fun getKnownObject(): Settings? { + val known = db.settingsQueries.selectBySlug(eventSlug).executeAsList() + + return if (known.isEmpty()) { + null + } else if (known.size == 1) { + known[0] + } else { + // What's going on here? Let's delete and re-fetch + db.settingsQueries.deleteByEventSlug(eventSlug) + null + } + } + + override fun getResourceName(): String = "settings" + + override fun getUrl(): String = api.eventResourceUrl(eventSlug, "settings") + + override fun getJSON(obj: Settings): JSONObject = JSONObject(obj.json_data!!) 
+ + override fun insert(jsonobj: JSONObject) { + db.settingsQueries.insert( + slug = eventSlug, + address = jsonobj.optString("invoice_address_from"), + city = jsonobj.optString("invoice_address_from_city"), + country = jsonobj.optString("invoice_address_from_country"), + json_data = jsonobj.toString(), + name = jsonobj.optString("invoice_address_from_name"), + pretixpos_additional_receipt_text = jsonobj.optString("pretixpos_additional_receipt_text"), + tax_id = jsonobj.optString("invoice_address_from_tax_id"), + vat_id = jsonobj.optString("invoice_address_from_vat_id"), + zipcode = jsonobj.optString("invoice_address_from_zipcode") + ) + } + + override fun update(obj: Settings, jsonobj: JSONObject) { + db.settingsQueries.updateFromJson( + address = jsonobj.optString("invoice_address_from"), + city = jsonobj.optString("invoice_address_from_city"), + country = jsonobj.optString("invoice_address_from_country"), + json_data = jsonobj.toString(), + name = jsonobj.optString("invoice_address_from_name"), + pretixpos_additional_receipt_text = jsonobj.optString("pretixpos_additional_receipt_text"), + tax_id = jsonobj.optString("invoice_address_from_tax_id"), + vat_id = jsonobj.optString("invoice_address_from_vat_id"), + zipcode = jsonobj.optString("invoice_address_from_zipcode"), + slug = obj.slug, + ) + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.settingsQueries.transaction(false, body) + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SubEventSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SubEventSyncAdapter.java deleted file mode 100644 index c809f9f1..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SubEventSyncAdapter.java +++ /dev/null @@ -1,82 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.joda.time.format.ISODateTimeFormat; -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.List; - -import 
eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.Migrations; -import eu.pretix.libpretixsync.db.SubEvent; -import eu.pretix.libpretixsync.utils.JSONUtils; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; - -public class SubEventSyncAdapter extends BaseSingleObjectSyncAdapter { - - public SubEventSyncAdapter(BlockingEntityStore store, String eventSlug, String key, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, eventSlug, key, api, syncCycleId, feedback); - } - - @Override - public void updateObject(SubEvent obj, JSONObject jsonobj) throws JSONException { - obj.setServer_id(jsonobj.getLong("id")); - obj.setEvent_slug(eventSlug); - obj.setDate_from(ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")).toDate()); - if (!jsonobj.isNull("date_to")) { - obj.setDate_to(ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate()); - } - obj.setActive(jsonobj.getBoolean("active")); - obj.setJson_data(jsonobj.toString()); - } - - SubEvent getKnownObject() { - List is = store.select(SubEvent.class) - .where(SubEvent.SERVER_ID.eq(Long.valueOf(key))) - .get().toList(); - if (is.size() == 0) { - return null; - } else if (is.size() == 1) { - return is.get(0); - } else { - // What's going on here? 
Let's delete and re-fetch - store.delete(is); - return null; - } - } - - @Override - String getResourceName() { - return "subevents"; - } - - @Override - SubEvent newEmptyObject() { - return new SubEvent(); - } - - - public void standaloneRefreshFromJSON(JSONObject data) throws JSONException { - SubEvent obj = store.select(SubEvent.class) - .where(SubEvent.SERVER_ID.eq(data.getLong("id"))) - .get().firstOr(newEmptyObject()); - JSONObject old = null; - if (obj.getId() != null) { - old = obj.getJSON(); - } - - // Store object - data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION); - data.put("__libpretixsync_syncCycleId", syncCycleId); - if (old == null) { - updateObject(obj, data); - store.insert(obj); - } else { - if (!JSONUtils.similar(data, old)) { - updateObject(obj, data); - store.update(obj); - } - } - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SubEventSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SubEventSyncAdapter.kt new file mode 100644 index 00000000..cc17435d --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SubEventSyncAdapter.kt @@ -0,0 +1,109 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.db.Migrations +import eu.pretix.libpretixsync.sqldelight.SubEvent +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import eu.pretix.libpretixsync.utils.JSONUtils +import org.joda.time.format.ISODateTimeFormat +import org.json.JSONException +import org.json.JSONObject + +class SubEventSyncAdapter( + db: SyncDatabase, + eventSlug: String, + key: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback? 
= null, +) : BaseSingleObjectSyncAdapter( + db = db, + eventSlug = eventSlug, + key = key, + api = api, + syncCycleId = syncCycleId, + feedback = feedback, +) { + + override fun getKnownObject(): SubEvent? { + val known = db.subEventQueries.selectByServerId(key.toLong()).executeAsList() + + return if (known.isEmpty()) { + null + } else if (known.size == 1) { + known[0] + } else { + // What's going on here? Let's delete and re-fetch + db.subEventQueries.deleteByServerId(key.toLong()) + null + } + } + + override fun insert(jsonobj: JSONObject) { + val dateFrom = + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")) + .toDate() + + val dateTo = if (!jsonobj.isNull("date_to")) { + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate() + } else { + null + } + + db.subEventQueries.insert( + active = jsonobj.getBoolean("active"), + date_from = dateFrom, + date_to = dateTo, + event_slug = eventSlug, + json_data = jsonobj.toString(), + server_id = jsonobj.getLong("id"), + ) + } + + override fun update(obj: SubEvent, jsonobj: JSONObject) { + val dateFrom = + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_from")) + .toDate() + + val dateTo = if (!jsonobj.isNull("date_to")) { + ISODateTimeFormat.dateTimeParser().parseDateTime(jsonobj.getString("date_to")).toDate() + } else { + null + } + + db.subEventQueries.updateFromJson( + active = jsonobj.getBoolean("active"), + date_from = dateFrom, + date_to = dateTo, + event_slug = eventSlug, + json_data = jsonobj.toString(), + id = obj.id, + ) + } + + override fun getResourceName(): String = "subevents" + + override fun getJSON(obj: SubEvent): JSONObject = JSONObject(obj.json_data!!) 
+ + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.subEventQueries.transaction(false, body) + } + + @Throws(JSONException::class) + fun standaloneRefreshFromJSON(data: JSONObject) { + // Store object + data.put("__libpretixsync_dbversion", Migrations.CURRENT_VERSION) + data.put("__libpretixsync_syncCycleId", syncCycleId) + val known = getKnownObject() + if (known == null) { + insert(data) + } else { + val old = JSONObject(known.json_data!!) + if (!JSONUtils.similar(data, old)) { + update(known, data) + } + } + } +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SyncManager.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SyncManager.java index 1f660320..7e9fe3be 100644 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SyncManager.java +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/SyncManager.java @@ -1,7 +1,14 @@ package eu.pretix.libpretixsync.sync; import eu.pretix.libpretixsync.api.*; -import eu.pretix.libpretixsync.db.ReusableMedium; +import eu.pretix.libpretixsync.models.Question; +import eu.pretix.libpretixsync.models.db.QuestionExtensionsKt; +import eu.pretix.libpretixsync.sqldelight.Closing; +import eu.pretix.libpretixsync.sqldelight.ClosingExtensionsKt; +import eu.pretix.libpretixsync.sqldelight.QueuedCall; +import eu.pretix.libpretixsync.sqldelight.QueuedCheckIn; +import eu.pretix.libpretixsync.sqldelight.QueuedOrder; +import eu.pretix.libpretixsync.sqldelight.SyncDatabase; import eu.pretix.libpretixsync.utils.JSONUtils; import io.requery.sql.StatementExecutionException; @@ -16,18 +23,9 @@ import eu.pretix.libpretixsync.SentryInterface; import eu.pretix.libpretixsync.config.ConfigStore; import eu.pretix.libpretixsync.db.Answer; -import eu.pretix.libpretixsync.db.CheckIn; -import eu.pretix.libpretixsync.db.Closing; -import eu.pretix.libpretixsync.db.Order; -import eu.pretix.libpretixsync.db.OrderPosition; -import eu.pretix.libpretixsync.db.Question; -import 
eu.pretix.libpretixsync.db.QueuedCall; -import eu.pretix.libpretixsync.db.QueuedCheckIn; -import eu.pretix.libpretixsync.db.QueuedOrder; import eu.pretix.libpretixsync.db.Receipt; import eu.pretix.libpretixsync.db.ReceiptLine; import eu.pretix.libpretixsync.db.ReceiptPayment; -import eu.pretix.libpretixsync.db.ResourceSyncStatus; import io.requery.BlockingEntityStore; import io.requery.Persistable; @@ -42,6 +40,7 @@ public enum Profile { protected long upload_interval; protected long download_interval; protected BlockingEntityStore dataStore; + protected SyncDatabase db; protected FileStorage fileStorage; protected Profile profile; protected boolean with_pdf_data; @@ -99,6 +98,7 @@ public SyncManager( PretixApi api, SentryInterface sentry, BlockingEntityStore dataStore, + SyncDatabase db, FileStorage fileStorage, long upload_interval, long download_interval, @@ -122,6 +122,7 @@ public SyncManager( this.upload_interval = upload_interval; this.download_interval = download_interval; this.dataStore = dataStore; + this.db = db; this.fileStorage = fileStorage; this.profile = profile; this.with_pdf_data = with_pdf_data; @@ -369,7 +370,7 @@ protected void fetchDeviceInfo() throws ApiException, JSONException, ResourceNot configStore.setDeviceKnownGateID(gateID); if (vdata.has("medium_key_sets")) { - MediumKeySetSyncAdapter mkssa = new MediumKeySetSyncAdapter(dataStore, fileStorage, api, configStore.getSyncCycleId(), null, vdata.getJSONArray("medium_key_sets")); + MediumKeySetSyncAdapter mkssa = new MediumKeySetSyncAdapter(db, fileStorage, api, configStore.getSyncCycleId(), null, vdata.getJSONArray("medium_key_sets")); mkssa.download(); } @@ -395,7 +396,7 @@ protected void downloadData(ProgressFeedback feedback, Boolean skip_orders, Stri if (profile == Profile.PRETIXPOS) { try { - download(new CashierSyncAdapter(dataStore, fileStorage, api, configStore.getSyncCycleId(), feedback)); + download(new CashierSyncAdapter(db, fileStorage, api, configStore.getSyncCycleId(), 
feedback)); } catch (NotFoundApiException e) { // ignore, this is only supported from a later pretixpos-backend version } @@ -404,7 +405,7 @@ protected void downloadData(ProgressFeedback feedback, Boolean skip_orders, Stri } } - download(new AllSubEventsSyncAdapter(dataStore, fileStorage, api, configStore.getSyncCycleId(), feedback)); + download(new AllSubEventsSyncAdapter(db, fileStorage, api, configStore.getSyncCycleId(), feedback)); List slugs; if (overrideEventSlug != null) { slugs = new ArrayList<>(); @@ -418,58 +419,58 @@ protected void downloadData(ProgressFeedback feedback, Boolean skip_orders, Stri subEvent = overrideSubeventId; } try { - download(new EventSyncAdapter(dataStore, eventSlug, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new EventSyncAdapter(db, eventSlug, eventSlug, api, configStore.getSyncCycleId(), feedback)); } catch (PermissionDeniedApiException e) { e.eventSlug = eventSlug; throw e; } - download(new ItemCategorySyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); - download(new ItemSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); - download(new QuestionSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new ItemCategorySyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new ItemSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new QuestionSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); if (profile == Profile.PRETIXPOS) { - download(new QuotaSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback, subEvent)); - download(new TaxRuleSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); - download(new TicketLayoutSyncAdapter(dataStore, fileStorage, eventSlug, api, 
configStore.getSyncCycleId(), salesChannel, feedback)); + download(new QuotaSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback, subEvent)); + download(new TaxRuleSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new TicketLayoutSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback, salesChannel)); } - download(new BadgeLayoutSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); - download(new BadgeLayoutItemSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); - download(new CheckInListSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback, subEvent)); + download(new BadgeLayoutSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new BadgeLayoutItemSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new CheckInListSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback, subEvent)); if (profile == Profile.PRETIXSCAN || profile == Profile.PRETIXSCAN_ONLINE) { // We don't need these on pretixPOS, so we can save some traffic try { - download(new RevokedTicketSecretSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new RevokedTicketSecretSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); } catch (NotFoundApiException e) { // ignore, this is only supported from pretix 3.12. } try { - download(new BlockedTicketSecretSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new BlockedTicketSecretSyncAdapter(db, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); } catch (NotFoundApiException e) { // ignore, this is only supported from pretix 4.17. 
} } if (profile == Profile.PRETIXSCAN && !skip_orders) { - OrderSyncAdapter osa = new OrderSyncAdapter(dataStore, fileStorage, eventSlug, subEvent, with_pdf_data, false, api, configStore.getSyncCycleId(), feedback); + OrderSyncAdapter osa = new OrderSyncAdapter(db, fileStorage, eventSlug, subEvent, with_pdf_data, false, api, configStore.getSyncCycleId(), feedback); download(osa); try { - download(new ReusableMediaSyncAdapter(dataStore, fileStorage, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new ReusableMediaSyncAdapter(db, fileStorage, api, configStore.getSyncCycleId(), feedback)); } catch (NotFoundApiException e) { // ignore, this is only supported from pretix 4.19. } } try { - download(new SettingsSyncAdapter(dataStore, eventSlug, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new SettingsSyncAdapter(db, eventSlug, eventSlug, api, configStore.getSyncCycleId(), feedback)); } catch (ApiException e) { // Older pretix installations // We don't need these on pretixSCAN, so we can save some traffic if (profile == Profile.PRETIXPOS) { - download(new InvoiceSettingsSyncAdapter(dataStore, eventSlug, eventSlug, api, configStore.getSyncCycleId(), feedback)); + download(new InvoiceSettingsSyncAdapter(db, eventSlug, eventSlug, api, configStore.getSyncCycleId(), feedback)); } } } if (profile == Profile.PRETIXSCAN && !skip_orders && overrideEventSlug == null) { - OrderCleanup oc = new OrderCleanup(dataStore, fileStorage, api, configStore.getSyncCycleId(), feedback); + OrderCleanup oc = new OrderCleanup(db, fileStorage, api, configStore.getSyncCycleId(), feedback); if ((System.currentTimeMillis() - configStore.getLastCleanup()) > 3600 * 1000 * 12) { for (String eventSlug : configStore.getSynchronizedEvents()) { oc.deleteOldSubevents(eventSlug, overrideSubeventId > 0L ? 
overrideSubeventId : configStore.getSelectedSubeventForEvent(eventSlug)); @@ -479,12 +480,12 @@ protected void downloadData(ProgressFeedback feedback, Boolean skip_orders, Stri configStore.setLastCleanup(System.currentTimeMillis()); } } else if (profile == Profile.PRETIXSCAN_ONLINE && overrideEventSlug == null) { - dataStore.delete(CheckIn.class).get().value(); - dataStore.delete(OrderPosition.class).get().value(); - dataStore.delete(Order.class).get().value(); - dataStore.delete(ResourceSyncStatus.class).where(ResourceSyncStatus.RESOURCE.like("order%")).get().value(); + db.getCompatQueries().truncateCheckIn(); + db.getCompatQueries().truncateOrderPosition(); + db.getCompatQueries().truncateOrder(); + db.getResourceSyncStatusQueries().deleteByResourceFilter("order%"); if ((System.currentTimeMillis() - configStore.getLastCleanup()) > 3600 * 1000 * 12) { - OrderCleanup oc = new OrderCleanup(dataStore, fileStorage, api, configStore.getSyncCycleId(), feedback); + OrderCleanup oc = new OrderCleanup(db, fileStorage, api, configStore.getSyncCycleId(), feedback); oc.deleteOldPdfImages(); configStore.setLastCleanup(System.currentTimeMillis()); } @@ -492,12 +493,11 @@ protected void downloadData(ProgressFeedback feedback, Boolean skip_orders, Stri } catch (DeviceAccessRevokedException e) { - int deleted = 0; - deleted += dataStore.delete(CheckIn.class).get().value(); - deleted += dataStore.delete(OrderPosition.class).get().value(); - deleted += dataStore.delete(Order.class).get().value(); - deleted += dataStore.delete(ReusableMedium.class).get().value(); - deleted += dataStore.delete(ResourceSyncStatus.class).get().value(); + db.getCompatQueries().truncateCheckIn(); + db.getCompatQueries().truncateOrderPosition(); + db.getCompatQueries().truncateOrder(); + db.getCompatQueries().truncateReusableMedium(); + db.getCompatQueries().truncateResourceSyncStatus(); throw new SyncException(e.getMessage()); } catch (JSONException e) { e.printStackTrace(); @@ -517,8 +517,9 @@ protected 
void downloadData(ProgressFeedback feedback, Boolean skip_orders, Stri protected void uploadQueuedCalls(ProgressFeedback feedback) throws SyncException { sentry.addBreadcrumb("sync.queue", "Start queuedcall upload"); - List calls = dataStore.select(QueuedCall.class) - .get().toList(); + + + List calls = db.getQueuedCallQueries().selectAll().executeAsList(); String url = ""; int i = 0; for (QueuedCall call : calls) { @@ -527,14 +528,14 @@ protected void uploadQueuedCalls(ProgressFeedback feedback) throws SyncException feedback.postFeedback("Uploading queued calls (" + i + "/" + calls.size() + ") …"); } i++; - url = call.url; + url = call.getUrl(); PretixApi.ApiResponse response = api.postResource( - call.url, - new JSONObject(call.body), - call.idempotency_key + call.getUrl(), + new JSONObject(call.getBody()), + call.getIdempotency_key() ); if (response.getResponse().code() < 500) { - dataStore.delete(call); + db.getQueuedCallQueries().delete(call.getId()); if (response.getResponse().code() >= 400) { sentry.captureException(new ApiException("Received response (" + response.getResponse().code() + ") for queued call: " + response.getData().toString())); // We ignore 400s, because we can't do something about them @@ -548,7 +549,7 @@ protected void uploadQueuedCalls(ProgressFeedback feedback) throws SyncException } catch (NotFoundApiException e) { if (url.contains("/failed_checkins/") || url.contains("/printlog/")) { // ignore this one: old pretix systems don't have it - dataStore.delete(call); + db.getQueuedCallQueries().delete(call.getId()); } else { sentry.addBreadcrumb("sync.queue", "API Error: " + e.getMessage()); throw new SyncException(e.getMessage()); @@ -577,10 +578,13 @@ protected void uploadReceipts(ProgressFeedback feedback) throws SyncException { feedback.postFeedback("Uploading receipts (" + i + "/" + receipts.size() + ") …"); } i++; + + JSONObject data = receipt.toJSON(); JSONArray lines = new JSONArray(); JSONArray payments = new JSONArray(); for 
(ReceiptLine line : receipt.getLines()) { + // TODO: Manually add addon_to.positionid when switching to SQLDelight lines.put(line.toJSON()); } for (ReceiptPayment payment : receipt.getPayments()) { @@ -613,10 +617,7 @@ protected void uploadReceipts(ProgressFeedback feedback) throws SyncException { protected void uploadOrders(ProgressFeedback feedback) throws SyncException { sentry.addBreadcrumb("sync.queue", "Start order upload"); - List orders = dataStore.select(QueuedOrder.class) - .where(QueuedOrder.ERROR.isNull()) - .and(QueuedOrder.LOCKED.eq(false)) - .get().toList(); + List orders = db.getQueuedOrderQueries().selectUnlockedWithoutError().executeAsList(); try { int i = 0; @@ -626,8 +627,7 @@ protected void uploadOrders(ProgressFeedback feedback) throws SyncException { } i++; - qo.setLocked(true); - dataStore.update(qo, QueuedOrder.LOCKED); + db.getCompatQueries().lockQueuedOrder(qo.getId()); Long startedAt = System.currentTimeMillis(); PretixApi.ApiResponse resp = api.postResource( api.eventResourceUrl(qo.getEvent_slug(), "orders") + "?pdf_data=true&force=true", @@ -635,18 +635,15 @@ protected void uploadOrders(ProgressFeedback feedback) throws SyncException { qo.getIdempotency_key() ); if (resp.getResponse().code() == 201) { - Receipt r = qo.getReceipt(); - r.setOrder_code(resp.getData().getString("code")); - dataStore.update(r, Receipt.ORDER_CODE); - dataStore.delete(qo); - (new OrderSyncAdapter(dataStore, fileStorage, qo.getEvent_slug(), null, true, true, api, configStore.getSyncCycleId(), null)).standaloneRefreshFromJSON(resp.getData()); + db.getReceiptQueries().updateOrderCode(resp.getData().getString("code"), qo.getReceipt()); + db.getQueuedOrderQueries().delete(qo.getId()); + (new OrderSyncAdapter(db, fileStorage, qo.getEvent_slug(), null, true, true, api, configStore.getSyncCycleId(), null)).standaloneRefreshFromJSON(resp.getData()); if (connectivityFeedback != null) { connectivityFeedback.recordSuccess(System.currentTimeMillis() - startedAt); } } else 
if (resp.getResponse().code() == 400) { // TODO: User feedback or log in some way? - qo.setError(resp.getData().toString()); - dataStore.update(qo); + db.getQueuedOrderQueries().updateError(resp.getData().toString(), qo.getId()); } } } catch (JSONException e) { @@ -669,10 +666,7 @@ protected void uploadOrders(ProgressFeedback feedback) throws SyncException { protected void uploadClosings(ProgressFeedback feedback) throws SyncException { sentry.addBreadcrumb("sync.queue", "Start closings upload"); - List closings = dataStore.select(Closing.class) - .where(Closing.OPEN.eq(false)) - .and(Closing.SERVER_ID.isNull()) - .get().toList(); + List closings = db.getClosingQueries().selectClosedWithoutServerId().executeAsList(); try { int i = 0; @@ -683,11 +677,10 @@ protected void uploadClosings(ProgressFeedback feedback) throws SyncException { i++; PretixApi.ApiResponse response = api.postResource( api.organizerResourceUrl("posdevices/" + configStore.getPosId() + "/closings"), - closing.toJSON() + ClosingExtensionsKt.toJSON(closing) ); if (response.getResponse().code() == 201) { - closing.setServer_id(response.getData().getLong("closing_id")); - dataStore.update(closing); + db.getClosingQueries().updateServerId(response.getData().getLong("closing_id"), closing.getId()); } else { throw new SyncException(response.getData().toString()); } @@ -706,8 +699,7 @@ protected void uploadClosings(ProgressFeedback feedback) throws SyncException { protected void uploadCheckins(ProgressFeedback feedback) throws SyncException { sentry.addBreadcrumb("sync.queue", "Start check-in upload"); - List queued = dataStore.select(QueuedCheckIn.class) - .get().toList(); + List queued = db.getQueuedCheckInQueries().selectAll().executeAsList(); try { int i = 0; @@ -721,8 +713,18 @@ protected void uploadCheckins(ProgressFeedback feedback) throws SyncException { JSONArray ja = new JSONArray(qci.getAnswers()); for (int j = 0; j < ja.length(); j++) { JSONObject jo = ja.getJSONObject(j); - Question q = new 
Question(); - q.setServer_id(jo.getLong("question")); + + Question q = QuestionExtensionsKt.toModel( + new eu.pretix.libpretixsync.sqldelight.Question( + -1L, + null, + "{}", + -1L, + false, + jo.getLong("question") + ) + ); + answers.add(new Answer(q, jo.getString("answer"), null)); } } catch (JSONException e) { @@ -736,9 +738,9 @@ protected void uploadCheckins(ProgressFeedback feedback) throws SyncException { } if (qci.getDatetime_string() == null || qci.getDatetime_string().equals("")) { // Backwards compatibility - ar = api.redeem(qci.getEvent_slug(), qci.getSecret(), qci.getDatetime(), true, qci.getNonce(), answers, qci.checkinListId, false, false, qci.getType(), st, null, false); + ar = api.redeem(qci.getEvent_slug(), qci.getSecret(), qci.getDatetime(), true, qci.getNonce(), answers, qci.getCheckinListId(), false, false, qci.getType(), st, null, false); } else { - ar = api.redeem(qci.getEvent_slug(), qci.getSecret(), qci.getDatetime_string(), true, qci.getNonce(), answers, qci.checkinListId, false, false, qci.getType(), st, null, false); + ar = api.redeem(qci.getEvent_slug(), qci.getSecret(), qci.getDatetime_string(), true, qci.getNonce(), answers, qci.getCheckinListId(), false, false, qci.getType(), st, null, false); } if (connectivityFeedback != null) { connectivityFeedback.recordSuccess(System.currentTimeMillis() - startedAt); @@ -746,12 +748,12 @@ protected void uploadCheckins(ProgressFeedback feedback) throws SyncException { JSONObject response = ar.getData(); String status = response.optString("status", null); if ("ok".equals(status)) { - dataStore.delete(qci); + db.getQueuedCheckInQueries().delete(qci.getId()); } else if (ar.getResponse().code() == 404 || ar.getResponse().code() == 400) { // There's no point in re-trying a 404 or 400 since it won't change on later uploads. // Modern pretix versions already log this case and handle it if possible, nothing // we can do here. 
- dataStore.delete(qci); + db.getQueuedCheckInQueries().delete(qci.getId()); } // Else: will be retried later } } catch (JSONException e) { diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TaxRuleSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TaxRuleSyncAdapter.java deleted file mode 100644 index ec4098d9..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TaxRuleSyncAdapter.java +++ /dev/null @@ -1,63 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONException; -import org.json.JSONObject; - -import java.util.Iterator; -import java.util.List; - -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.TaxRule; -import io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class TaxRuleSyncAdapter extends BaseConditionalSyncAdapter { - - public TaxRuleSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - } - - @Override - public void updateObject(TaxRule obj, JSONObject jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - obj.setServer_id(jsonobj.getLong("id")); - obj.setJson_data(jsonobj.toString()); - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(TaxRule.class) - .where(TaxRule.EVENT_SLUG.eq(eventSlug)) - .and(TaxRule.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(TaxRule.SERVER_ID) - .where(TaxRule.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - String getResourceName() { - return "taxrules"; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(TaxRule obj) { - 
return obj.getServer_id(); - } - - @Override - TaxRule newEmptyObject() { - return new TaxRule(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TaxRuleSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TaxRuleSyncAdapter.kt new file mode 100644 index 00000000..f0293ae5 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TaxRuleSyncAdapter.kt @@ -0,0 +1,80 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sqldelight.TaxRule +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import org.json.JSONObject + +class TaxRuleSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, +) : BaseConditionalSyncAdapter( + db = db, + fileStorage = fileStorage, + eventSlug = eventSlug, + api = api, + syncCycleId = syncCycleId, + feedback = feedback, +) { + + override fun getResourceName(): String = "taxrules" + + override fun getId(obj: TaxRule): Long = obj.server_id!! + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getJSON(obj: TaxRule): JSONObject = JSONObject(obj.json_data!!) 
+ + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.taxRuleQueries.selectServerIdsByEventSlug(eventSlug).execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + db.taxRuleQueries.insert( + event_slug = eventSlug, + json_data = jsonobj.toString(), + server_id = jsonobj.getLong("id"), + ) + } + + override fun update(obj: TaxRule, jsonobj: JSONObject) { + db.taxRuleQueries.updateFromJson( + event_slug = eventSlug, + json_data = jsonobj.toString(), + id = obj.id, + ) + } + + override fun delete(key: Long) { + db.taxRuleQueries.deleteByServerId(key) + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.taxRuleQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.taxRuleQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + ).executeAsList() + +} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TicketLayoutSyncAdapter.java b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TicketLayoutSyncAdapter.java deleted file mode 100644 index 4eaafd23..00000000 --- a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TicketLayoutSyncAdapter.java +++ /dev/null @@ -1,208 +0,0 @@ -package eu.pretix.libpretixsync.sync; - -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; - -import eu.pretix.libpretixsync.api.ApiException; -import eu.pretix.libpretixsync.api.PretixApi; -import eu.pretix.libpretixsync.db.Item; -import eu.pretix.libpretixsync.db.TicketLayout; -import eu.pretix.libpretixsync.utils.HashUtils; -import 
io.requery.BlockingEntityStore; -import io.requery.Persistable; -import io.requery.query.Tuple; -import io.requery.util.CloseableIterator; - -public class TicketLayoutSyncAdapter extends BaseDownloadSyncAdapter { - String salesChannel = "pretixpos"; - - public TicketLayoutSyncAdapter(BlockingEntityStore store, FileStorage fileStorage, String eventSlug, PretixApi api, String syncCycleId, String salesChannel, SyncManager.ProgressFeedback feedback) { - super(store, fileStorage, eventSlug, api, syncCycleId, feedback); - this.salesChannel = salesChannel; - } - - @Override - public void updateObject(TicketLayout obj, JSONObject jsonobj) throws JSONException { - obj.setEvent_slug(eventSlug); - obj.setIs_default(jsonobj.getBoolean("default")); - obj.setServer_id(jsonobj.getLong("id")); - obj.setJson_data(jsonobj.toString()); - - // itemids will be a list of all item IDs where we *could* assign this to through either - // channel - List itemids_web = new ArrayList<>(); - List itemids_pretixpos = new ArrayList<>(); - - // Iterate over all items this layout is assigned to - JSONArray assignmentarr = jsonobj.getJSONArray("item_assignments"); - for (int i = 0; i < assignmentarr.length(); i++) { - Long item = assignmentarr.getJSONObject(i).getLong("item"); - String sc = assignmentarr.getJSONObject(i).optString("sales_channel", "web"); - if (sc == null) { - sc = "web"; - } - - if (sc.equals("web")) { - itemids_web.add(item); - - Item itemobj = store.select(Item.class).where( - Item.SERVER_ID.eq(item) - ).get().firstOrNull(); - if (itemobj != null) { - itemobj.setTicket_layout_id(obj.getServer_id()); - store.update(itemobj, Item.TICKET_LAYOUT_ID); - } - } else if (sc.equals(salesChannel)) { - itemids_pretixpos.add(item); - - Item itemobj = store.select(Item.class).where( - Item.SERVER_ID.eq(item) - ).get().firstOrNull(); - if (itemobj != null) { - itemobj.setTicket_layout_pretixpos_id(obj.getServer_id()); - store.update(itemobj, Item.TICKET_LAYOUT_PRETIXPOS_ID); - } - } - } - - 
List items_to_remove_web; - if (!itemids_web.isEmpty()) { - // Look if there are any items in the local database assigned to this layout even though - // they should not be any more. - items_to_remove_web = store.select(Item.class).where( - Item.SERVER_ID.notIn(itemids_web).and( - Item.TICKET_LAYOUT_ID.eq(obj.getServer_id()) - ) - ).get().toList(); - } else { - // Look if there are any items in the local database assigned to this layout even though - // they should not be any more. - items_to_remove_web = store.select(Item.class).where( - Item.TICKET_LAYOUT_ID.eq(obj.getServer_id()) - ).get().toList(); - } - for (Item item : items_to_remove_web) { - item.setTicket_layout_id(null); - store.update(item, Item.TICKET_LAYOUT_ID); - } - - List items_to_remove_pretixpos; - if (!itemids_pretixpos.isEmpty()) { - // Look if there are any items in the local database assigned to this layout even though - // they should not be any more. - items_to_remove_pretixpos = store.select(Item.class).where( - Item.SERVER_ID.notIn(itemids_pretixpos).and( - Item.TICKET_LAYOUT_PRETIXPOS_ID.eq(obj.getServer_id()) - ) - ).get().toList(); - } else { - // Look if there are any items in the local database assigned to this layout even though - // they should not be any more. 
- items_to_remove_pretixpos = store.select(Item.class).where( - Item.TICKET_LAYOUT_PRETIXPOS_ID.eq(obj.getServer_id()) - ).get().toList(); - } - for (Item item : items_to_remove_pretixpos) { - item.setTicket_layout_pretixpos_id(null); - store.update(item, Item.TICKET_LAYOUT_PRETIXPOS_ID); - } - - String remote_filename = jsonobj.optString("background"); - if (remote_filename != null && remote_filename.startsWith("http")) { - String hash = HashUtils.toSHA1(remote_filename.getBytes()); - String local_filename = "ticketlayout_" + obj.getServer_id() + "_" + hash + ".pdf"; - if (obj.getBackground_filename() != null && !obj.getBackground_filename().equals(local_filename)) { - fileStorage.delete(obj.getBackground_filename()); - obj.setBackground_filename(null); - } - if (!fileStorage.contains(local_filename)) { - try { - PretixApi.ApiResponse file = api.downloadFile(remote_filename); - OutputStream os = fileStorage.writeStream(local_filename); - InputStream is = file.getResponse().body().byteStream(); - byte[] buffer = new byte[1444]; - int byteread; - while ((byteread = is.read(buffer)) != -1) { - os.write(buffer, 0, byteread); - } - is.close(); - os.close(); - obj.setBackground_filename(local_filename); - } catch (ApiException e) { - // TODO: What to do? - e.printStackTrace(); - } catch (IOException e) { - // TODO: What to do? 
- e.printStackTrace(); - fileStorage.delete(local_filename); - } - } else { - obj.setBackground_filename(local_filename); - } - } else { - if (obj.getBackground_filename() != null) { - fileStorage.delete(obj.getBackground_filename()); - obj.setBackground_filename(null); - } - } - } - - @Override - protected void prepareDelete(TicketLayout obj) { - super.prepareDelete(obj); - if (obj.getBackground_filename() != null) { - fileStorage.delete(obj.getBackground_filename()); - } - } - - @Override - public CloseableIterator runBatch(List ids) { - return store.select(TicketLayout.class) - .where(TicketLayout.EVENT_SLUG.eq(eventSlug)) - .and(TicketLayout.SERVER_ID.in(ids)) - .get().iterator(); - } - - @Override - CloseableIterator getKnownIDsIterator() { - return store.select(TicketLayout.SERVER_ID) - .where(TicketLayout.EVENT_SLUG.eq(eventSlug)) - .get().iterator(); - } - - @Override - String getResourceName() { - return "ticketlayouts"; - } - - protected JSONObject preprocessObject(JSONObject obj) { - try { - obj.put("_written_after_20200123", true); // Trigger full resyncronisation after a bugfix - } catch (JSONException e) { - e.printStackTrace(); - } - return obj; - } - - @Override - Long getId(JSONObject obj) throws JSONException { - return obj.getLong("id"); - } - - @Override - Long getId(TicketLayout obj) { - return obj.getServer_id(); - } - - @Override - TicketLayout newEmptyObject() { - return new TicketLayout(); - } -} diff --git a/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TicketLayoutSyncAdapter.kt b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TicketLayoutSyncAdapter.kt new file mode 100644 index 00000000..0125edd8 --- /dev/null +++ b/libpretixsync/src/main/java/eu/pretix/libpretixsync/sync/TicketLayoutSyncAdapter.kt @@ -0,0 +1,234 @@ +package eu.pretix.libpretixsync.sync + +import app.cash.sqldelight.TransactionWithoutReturn +import app.cash.sqldelight.db.QueryResult +import eu.pretix.libpretixsync.api.ApiException +import 
eu.pretix.libpretixsync.api.PretixApi +import eu.pretix.libpretixsync.sqldelight.Item +import eu.pretix.libpretixsync.sqldelight.SyncDatabase +import eu.pretix.libpretixsync.sqldelight.TicketLayout +import eu.pretix.libpretixsync.sync.SyncManager.ProgressFeedback +import eu.pretix.libpretixsync.utils.HashUtils +import org.json.JSONArray +import org.json.JSONObject +import java.io.IOException + +class TicketLayoutSyncAdapter( + db: SyncDatabase, + fileStorage: FileStorage, + eventSlug: String, + api: PretixApi, + syncCycleId: String, + feedback: ProgressFeedback?, + private val salesChannel: String = "pretixpos", +) : BaseDownloadSyncAdapter( + db = db, + api = api, + syncCycleId = syncCycleId, + eventSlug = eventSlug, + fileStorage = fileStorage, + feedback = feedback, +) { + override fun getResourceName(): String = "ticketlayouts" + + override fun getJSON(obj: TicketLayout): JSONObject = JSONObject(obj.json_data!!) + + override fun getId(obj: JSONObject): Long = obj.getLong("id") + + override fun getId(obj: TicketLayout): Long = obj.server_id!! 
+ + override fun queryKnownIDs(): MutableSet { + val res = mutableSetOf() + db.ticketLayoutQueries.selectServerIdsByEventSlug(eventSlug).execute { cursor -> + while (cursor.next().value) { + val id = cursor.getLong(0) + ?: throw RuntimeException("server_id column not available") + + res.add(id) + } + QueryResult.Unit + } + + return res + } + + override fun insert(jsonobj: JSONObject) { + val serverId = jsonobj.getLong("id") + + // Iterate over all items this layout is assigned to + processItems(serverId, jsonobj.getJSONArray("item_assignments")) + + val backgroundFilename = processBackground(jsonobj, null) + + db.ticketLayoutQueries.insert( + background_filename = backgroundFilename, + event_slug = eventSlug, + is_default = jsonobj.getBoolean("default"), + json_data = jsonobj.toString(), + server_id = serverId, + ) + } + + override fun update(obj: TicketLayout, jsonobj: JSONObject) { + val serverId = jsonobj.getLong("id") + + // Iterate over all items this layout is assigned to + processItems(serverId, jsonobj.getJSONArray("item_assignments")) + + val backgroundFilename = processBackground(jsonobj, null) + + db.ticketLayoutQueries.updateFromJson( + background_filename = backgroundFilename, + event_slug = eventSlug, + is_default = jsonobj.getBoolean("default"), + json_data = jsonobj.toString(), + id = obj.id, + ) + } + + private fun processItems(serverId: Long, assignmentarr: JSONArray) { + // itemids will be a list of all item IDs where we *could* assign this to through either + // channel + val itemids_web: MutableList = ArrayList() + val itemids_pretixpos: MutableList = ArrayList() + + for (i in 0 until assignmentarr.length()) { + val item = assignmentarr.getJSONObject(i).getLong("item") + var sc = assignmentarr.getJSONObject(i).optString("sales_channel", "web") + if (sc == null) { + sc = "web" + } + + if (sc == "web") { + itemids_web.add(item) + + val itemobj = db.itemQueries.selectByServerId(item).executeAsOneOrNull() + if (itemobj != null) { + 
db.itemQueries.updateTicketLayoutId( + ticket_layout_id = serverId, + id = itemobj.id + ) + } + } else if (sc == salesChannel) { + itemids_pretixpos.add(item) + + val itemobj = db.itemQueries.selectByServerId(item).executeAsOneOrNull() + if (itemobj != null) { + db.itemQueries.updateTicketLayoutPretixposId( + ticket_layout_pretixpos_id = serverId, + id = itemobj.id + ) + } + } + } + + val items_to_remove_web: List = if (itemids_web.isNotEmpty()) { + // Look if there are any items in the local database assigned to this layout even though + // they should not be any more. + db.itemQueries.getWithOutdatedTicketLayoutId( + server_id_not_in = itemids_web, + ticket_layout_id = serverId, + ).executeAsList() + } else { + // Look if there are any items in the local database assigned to this layout even though + // they should not be any more. + db.itemQueries.selectByTicketLayoutId( + ticket_layout_id = serverId, + ).executeAsList() + } + for (item in items_to_remove_web) { + db.itemQueries.updateTicketLayoutId( + ticket_layout_id = null, + id = item.id, + ) + } + + val items_to_remove_pretixpos: List = if (itemids_pretixpos.isNotEmpty()) { + // Look if there are any items in the local database assigned to this layout even though + // they should not be any more. + db.itemQueries.getWithOutdatedTicketLayoutPretixposId( + server_id_not_in = itemids_pretixpos, + ticket_layout_pretixpos_id = serverId, + ).executeAsList() + } else { + // Look if there are any items in the local database assigned to this layout even though + // they should not be any more. + db.itemQueries.selectByTicketLayoutPretixposId( + ticket_layout_pretixpos_id = serverId, + ).executeAsList() + } + for (item in items_to_remove_pretixpos) { + db.itemQueries.updateTicketLayoutPretixposId( + ticket_layout_pretixpos_id = null, + id = item.id, + ) + } + } + + private fun processBackground(jsonobj: JSONObject, oldFilename: String?): String? 
{ + val remote_filename = jsonobj.optString("background") + var result: String? = null + + if (remote_filename != null && remote_filename.startsWith("http")) { + val hash = HashUtils.toSHA1(remote_filename.toByteArray()) + val local_filename = "ticketlayout_" + jsonobj.getLong("id") + "_" + hash + ".pdf" + if (oldFilename != null && oldFilename != local_filename) { + fileStorage.delete(oldFilename) + result = null + } + if (!fileStorage.contains(local_filename)) { + try { + val file = api.downloadFile(remote_filename) + val os = fileStorage.writeStream(local_filename) + val `is` = file.response.body!!.byteStream() + val buffer = ByteArray(1444) + var byteread: Int + while ((`is`.read(buffer).also { byteread = it }) != -1) { + os.write(buffer, 0, byteread) + } + `is`.close() + os.close() + result = local_filename + } catch (e: ApiException) { + // TODO: What to do? + e.printStackTrace() + } catch (e: IOException) { + // TODO: What to do? + e.printStackTrace() + fileStorage.delete(local_filename) + } + } else { + result = local_filename + } + } else { + if (oldFilename != null) { + fileStorage.delete(oldFilename) + result = null + } + } + + return result + } + + override fun delete(key: Long) { + db.ticketLayoutQueries.deleteByServerId(key) + } + + override fun prepareDelete(obj: TicketLayout) { + super.prepareDelete(obj) + if (obj.background_filename != null) { + fileStorage.delete(obj.background_filename) + } + } + + override fun runInTransaction(body: TransactionWithoutReturn.() -> Unit) { + db.ticketLayoutQueries.transaction(false, body) + } + + override fun runBatch(parameterBatch: List): List = + db.ticketLayoutQueries.selectByServerIdListAndEventSlug( + server_id = parameterBatch, + event_slug = eventSlug, + ).executeAsList() + +} diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/BadgeLayout.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/BadgeLayout.sq new file mode 100644 index 
00000000..5ce4604f --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/BadgeLayout.sq @@ -0,0 +1,54 @@ +selectById: +SELECT * +FROM BadgeLayout +WHERE id = ?; + +selectByEventSlug: +SELECT * +FROM BadgeLayout +WHERE event_slug = ?; + +selectByServerIdListAndEventSlug: +SELECT * +FROM BadgeLayout +WHERE server_id IN ? AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM BadgeLayout +WHERE event_slug = ?; + +deleteByServerId: +DELETE FROM BadgeLayout +WHERE server_id = ?; + +selectDefaultForEventSlug: +SELECT * +FROM BadgeLayout +WHERE + CAST(is_default AS INTEGER) = 1 + AND event_slug = ?; + +insert: +INSERT INTO BadgeLayout( + background_filename, + event_slug, + is_default, + json_data, + server_id +) VALUES( + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE BadgeLayout +SET + background_filename = ?, + event_slug = ?, + is_default = ?, + json_data = ? +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/BadgeLayoutItem.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/BadgeLayoutItem.sq new file mode 100644 index 00000000..80c26c46 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/BadgeLayoutItem.sq @@ -0,0 +1,47 @@ +selectByItemId: +SELECT * +FROM BadgeLayoutItem +WHERE item = :item_id; + +selectByEventSlug: +SELECT BadgeLayoutItem.* +FROM BadgeLayoutItem +LEFT JOIN BadgeLayout ON BadgeLayoutItem.layout = BadgeLayout.id +WHERE BadgeLayout.event_slug = ?; + +selectServerIdsByEventSlug: +SELECT BadgeLayoutItem.server_id +FROM BadgeLayoutItem +LEFT JOIN Item ON BadgeLayoutItem.item = Item.id +WHERE Item.event_slug = ?; + +selectByServerIdListAndEventSlug: +SELECT BadgeLayoutItem.* +FROM BadgeLayoutItem +LEFT JOIN Item ON BadgeLayoutItem.item = Item.id +WHERE Item.event_slug = ? 
AND BadgeLayoutItem.server_id IN ?; + +deleteByServerId: +DELETE FROM BadgeLayoutItem +WHERE server_id = ?; + +insert: +INSERT INTO BadgeLayoutItem( + json_data, + server_id, + item, + layout +) VALUES( + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE BadgeLayoutItem +SET + json_data = ?, + item = ?, + layout = ? +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/BlockedTicketSecret.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/BlockedTicketSecret.sq new file mode 100644 index 00000000..d2e7cba2 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/BlockedTicketSecret.sq @@ -0,0 +1,54 @@ +selectByServerId: +SELECT * +FROM BlockedTicketSecret +WHERE server_id = ?; + +selectByServerIdListAndEventSlug: +SELECT * +FROM BlockedTicketSecret +WHERE server_id IN ? AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM BlockedTicketSecret +WHERE event_slug = ?; + +countBlockedForSecret: +SELECT COUNT(*) +FROM BlockedTicketSecret +WHERE CAST(blocked AS INTEGER) = 1 AND secret = ?; + +deleteByServerId: +DELETE FROM BlockedTicketSecret +WHERE server_id = ?; + +deleteNotBlocked: +DELETE FROM BlockedTicketSecret +WHERE CAST(blocked AS INTEGER) = 0; + +insert: +INSERT INTO BlockedTicketSecret( + blocked, + event_slug, + json_data, + secret, + server_id, + updated +) VALUES( + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE BlockedTicketSecret +SET + blocked = ?, + event_slug = ?, + json_data = ?, + secret = ?, + updated = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/CachedPdfImage.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/CachedPdfImage.sq new file mode 100644 index 00000000..87695726 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/CachedPdfImage.sq @@ -0,0 +1,45 @@ +selectForOrderPosition: +SELECT * +FROM CachedPdfImage +WHERE orderposition_id = :order_position_server_id; + +selectForOrderPositionAndKey: +SELECT * +FROM CachedPdfImage +WHERE + orderposition_id = :order_position_server_id + AND key = :key; + +updateEtag: +UPDATE CachedPdfImage +SET + etag = ? +WHERE id = ?; + +insert: +INSERT INTO CachedPdfImage( + etag, + key, + orderposition_id +) +VALUES ( + :etag, + :key, + :order_position_server_id +); + +deleteUnseen: +DELETE FROM CachedPdfImage +WHERE + orderposition_id = :order_position_server_id + AND etag NOT IN :seen_etags; + +deleteOld: +DELETE FROM CachedPdfImage +WHERE + orderposition_id NOT IN (SELECT server_id FROM OrderPosition); + +countEtag: +SELECT COUNT(*) +FROM CachedPdfImage +WHERE etag = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Cashier.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Cashier.sq new file mode 100644 index 00000000..9008d204 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Cashier.sq @@ -0,0 +1,51 @@ +selectByServerId: +SELECT * +FROM Cashier +WHERE server_id = ?; + +selectById: +SELECT * +FROM Cashier +WHERE id = ?; + +selectByServerIdList: +SELECT * +FROM Cashier +WHERE server_id IN ?; + +selectServerIds: +SELECT server_id +FROM Cashier; + +deleteByServerId: +DELETE FROM Cashier +WHERE server_id = ?; + +insert: +INSERT INTO Cashier +( + active, + json_data, + name, + pin, + server_id, + userid +) +VALUES ( + ?, + ?, + ?, + ?, + ?, + ? 
+); + +updateFromJson: +UPDATE Cashier +SET + active = ?, + json_data = ?, + name = ?, + pin = ?, + userid = ? +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/CheckIn.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/CheckIn.sq new file mode 100644 index 00000000..8a892cfb --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/CheckIn.sq @@ -0,0 +1,77 @@ +selectById: +SELECT * +FROM CheckIn +WHERE id = ?; + +selectByPositionId: +SELECT * +FROM CheckIn +WHERE position = :postion_id; + +selectPositionIdByListIdAndType: +SELECT position +FROM CheckIn +WHERE listId = :list_server_id AND type = :type; + +count: +SELECT COUNT(*) +FROM CheckIn; + +deleteById: +DELETE FROM CheckIn WHERE id = ?; + +deleteByIdList: +DELETE FROM CheckIn WHERE id IN ?; + +insert: +INSERT INTO CheckIn ( + datetime, + json_data, + listId, + position, + server_id, + type +) +VALUES ( + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE CheckIn +SET + datetime = ?, + json_data = ?, + listId = ?, + position = ?, + type = ? +WHERE id = ?; + +selectForOrders: +SELECT CheckIn.* +FROM CheckIn +LEFT JOIN OrderPosition ON CheckIn.position = OrderPosition.id +LEFT JOIN orders ON OrderPosition.order_ref = orders.id +-- Doing this WHERE IN even though we have a JOIN is entirely redundant. 
+-- But we know that ``ids`` is of small size and this will trick SQLite into a +-- more efficient query plan that avoids a full table scan :) +WHERE position IN ( + SELECT OrderPosition.id + FROM OrderPosition + WHERE OrderPosition.order_ref IN ( + SELECT orders.id + FROM orders + WHERE orders.code IN :order_codes + ) +); + +-- for tests only: +testCountByOrderPositionSecret: +SELECT COUNT(*) +FROM CheckIn +LEFT JOIN OrderPosition ON CheckIn.position = OrderPosition.id +WHERE OrderPosition.secret = :secret; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/CheckInList.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/CheckInList.sq new file mode 100644 index 00000000..d0bbefc7 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/CheckInList.sq @@ -0,0 +1,99 @@ +selectByServerId: +SELECT * +FROM CheckInList +WHERE server_id = ?; + +selectByEventSlug: +SELECT * +FROM CheckInList +WHERE event_slug = ?; + +selectByServerIdAndEventSlug: +SELECT * +FROM CheckInList +WHERE server_id = ? AND event_slug = ?; + +selectByServerIdListAndEventSlug: +SELECT * +FROM CheckInList +WHERE server_id IN ? AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM CheckInList +WHERE event_slug = ?; + +deleteByServerId: +DELETE FROM CheckInList +WHERE server_id = ?; + +insert: +INSERT INTO CheckInList( + all_items, + event_slug, + include_pending, + json_data, + name, + server_id, + subevent_id +) VALUES( + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE CheckInList +SET + all_items = ?, + event_slug = ?, + include_pending = ?, + json_data = ?, + name = ?, + subevent_id = ? 
+WHERE id = ?; + +selectRelationsForList: +SELECT * +FROM CheckInList_Item +WHERE CheckInListId = :checkin_list_id; + +insertItemRelation: +INSERT INTO CheckInList_Item( + ItemId, + CheckInListId +) VALUES( + :item_id, + :checkin_list_id +); + +deleteItemRelation: +DELETE FROM CheckInList_Item +WHERE ItemId = :item_id AND CheckInListId = :checkin_list_id; + +deleteItemRelationsForList: +DELETE FROM CheckInList_Item +WHERE CheckInListId = :checkin_list_id; + +selectItemIdsForList: +-- We must select at least two columns, otherwise we get different result types between +-- SQLite and Postgres since the ItemId column is only defined as NOT NULL in Postgres +SELECT ItemId AS id, CheckInListId +FROM CheckInList_Item +WHERE CheckInListId = :checkin_list_id; + +checkIfItemIsInList: +SELECT COUNT(*) +FROM CheckInList_Item +WHERE CheckInListId = :checkin_list_id AND ItemId = :item_id; + +-- for tests only: +testUpdateJsonData: +UPDATE CheckInList +SET + json_data = ? +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Closing.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Closing.sq new file mode 100644 index 00000000..1dc8ec2e --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Closing.sq @@ -0,0 +1,55 @@ +selectById: +SELECT * +FROM Closing +WHERE id = ?; + +selectByServerId: +SELECT * +FROM Closing +WHERE server_id = ?; + +selectClosedWithoutServerId: +SELECT * +FROM Closing +WHERE + CAST(open AS INTEGER) = 0 + AND server_id IS NULL; + +insert: +INSERT INTO Closing ( + cash_counted, + cashier_name, + cashier_numericid, + cashier_userid, + datetime, + dsfinvk_uploaded, + first_receipt, + invoice_settings, + json_data, + last_receipt, + open, + payment_sum, + payment_sum_cash, + server_id +) VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +updateServerId: +UPDATE Closing +SET + server_id = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Event.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Event.sq new file mode 100644 index 00000000..3e0d13c0 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Event.sq @@ -0,0 +1,53 @@ +selectById: +SELECT * +FROM Event +WHERE id = ?; + +selectSlugs: +SELECT slug +FROM Event; + +selectBySlug: +SELECT * +FROM Event +WHERE slug = ?; + +selectBySlugList: +SELECT * +FROM Event +WHERE slug IN ?; + +deleteBySlug: +DELETE FROM Event +WHERE slug = ?; + +insert: +INSERT INTO Event ( + currency, + date_from, + date_to, + has_subevents, + json_data, + live, + slug +) +VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE Event +SET + currency = ?, + date_from = ?, + date_to = ?, + has_subevents = ?, + json_data = ?, + live = ? +WHERE slug = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Item.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Item.sq new file mode 100644 index 00000000..bfe8bef9 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Item.sq @@ -0,0 +1,120 @@ +selectAll: +SELECT * +FROM Item; + +selectByServerId: +SELECT * +FROM Item +WHERE server_id = ?; + +selectById: +SELECT * +FROM Item +WHERE id = ?; + +selectByEventSlug: +SELECT * +FROM Item +WHERE event_slug = ?; + +selectByServerIdAndEventSlug: +SELECT * +FROM Item +WHERE server_id = ? AND event_slug = ?; + +selectByServerIdListAndEventSlug: +SELECT * +FROM Item +WHERE server_id IN ? 
AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM Item +WHERE event_slug = ?; + +selectByTicketLayoutId: +SELECT * +FROM Item +WHERE ticket_layout_id = ?; + +selectByTicketLayoutPretixposId: +SELECT * +FROM Item +WHERE ticket_layout_pretixpos_id = ?; + +selectForCheckInList: +SELECT Item.* +FROM Item +WHERE Item.id IN ( + SELECT CheckInList_Item.ItemId + FROM CheckInList_Item + WHERE CheckInList_Item.CheckInListId = :check_in_list_id +); + +getWithOutdatedTicketLayoutId: +SELECT * +FROM Item +WHERE server_id NOT IN :server_id_not_in AND ticket_layout_id = ?; + +getWithOutdatedTicketLayoutPretixposId: +SELECT * +FROM Item +WHERE server_id NOT IN :server_id_not_in AND ticket_layout_pretixpos_id = ?; + +updateTicketLayoutId: +UPDATE Item +SET + ticket_layout_id = ? +WHERE id = ?; + +updateTicketLayoutPretixposId: +UPDATE Item +SET + ticket_layout_pretixpos_id = ? +WHERE id = ?; + +deleteByServerId: +DELETE FROM Item +WHERE server_id = ?; + +insert: +INSERT INTO Item +( + active, + admission, + category_id, + checkin_text, + event_slug, + json_data, + picture_filename, + position, + server_id, + ticket_layout_id, + ticket_layout_pretixpos_id +) +VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE Item +SET + active = ?, + admission = ?, + category_id = ?, + checkin_text = ?, + event_slug = ?, + json_data = ?, + picture_filename = ?, + position = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ItemCategory.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ItemCategory.sq new file mode 100644 index 00000000..46590fdb --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ItemCategory.sq @@ -0,0 +1,54 @@ +selectById: +SELECT * +FROM ItemCategory +WHERE id = ?; + +selectByServerId: +SELECT * +FROM ItemCategory +WHERE server_id = ?; + +selectByEventSlug: +SELECT * +FROM ItemCategory +WHERE event_slug = ?; + +selectByServerIdListAndEventSlug: +SELECT * +FROM ItemCategory +WHERE server_id IN ? AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM ItemCategory +WHERE event_slug = ?; + +deleteByServerId: +DELETE FROM ItemCategory +WHERE server_id = ?; + +insert: +INSERT INTO ItemCategory +( + event_slug, + is_addon, + json_data, + "position", + server_id +) +VALUES ( + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE ItemCategory +SET + event_slug = ?, + is_addon = ?, + json_data = ?, + "position" = ? +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/MediumKeySet.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/MediumKeySet.sq new file mode 100644 index 00000000..389009e2 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/MediumKeySet.sq @@ -0,0 +1,48 @@ +selectByPublicId: +SELECT * +FROM MediumKeySet +WHERE public_id = ?; + +selectPublicIds: +SELECT public_id +FROM MediumKeySet; + +selectByPublicIdList: +SELECT * +FROM MediumKeySet +WHERE public_id IN ?; + +deleteByPublicId: +DELETE FROM MediumKeySet +WHERE public_id = ?; + +insert: +INSERT INTO MediumKeySet( + active, + diversification_key, + json_data, + media_type, + organizer, + public_id, + uid_key +) VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ? 
+); + +updateFromJson: +UPDATE MediumKeySet +SET + active = ?, + diversification_key = ?, + json_data = ?, + media_type = ?, + organizer = ?, + public_id = ?, + uid_key = ? +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Order.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Order.sq new file mode 100644 index 00000000..8bf04b9f --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Order.sq @@ -0,0 +1,65 @@ +selectById: +SELECT * +FROM orders +WHERE id = ?; + +selectByCode: +SELECT * +FROM orders +WHERE code = ?; + +selectByCodeList: +SELECT * +FROM orders +WHERE code IN ?; + +count: +SELECT COUNT(*) +FROM orders; + +countForEventSlug: +SELECT COUNT(*) +FROM orders +WHERE event_slug = ?; + +deleteByCode: +DELETE FROM orders +WHERE code = ?; + +insert: +INSERT INTO orders ( + checkin_attention, + checkin_text, + code, + deleteAfterTimestamp, + email, + event_slug, + json_data, + status, + valid_if_pending +) +VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE orders +SET + checkin_attention = ?, + checkin_text = ?, + code = ?, + deleteAfterTimestamp = ?, + email = ?, + event_slug = ?, + json_data = ?, + status = ?, + valid_if_pending = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/OrderCleanup.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/OrderCleanup.sq new file mode 100644 index 00000000..424d2925 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/OrderCleanup.sq @@ -0,0 +1,71 @@ +countOrders: +SELECT COUNT(*) +FROM orders +WHERE + event_slug = :event_slug + AND (deleteAfterTimestamp IS NULL OR deleteAfterTimestamp < 1); + +selectOrderIds: +SELECT id +FROM orders +WHERE + event_slug = :event_slug + AND (deleteAfterTimestamp IS NULL OR deleteAfterTimestamp < 1) +LIMIT 100; + +selectSubEventIdsForOrder: +SELECT subevent_id +FROM OrderPosition +WHERE order_ref = :order_id; + +updateDeleteAfterTimestamp: +UPDATE orders +SET + deleteAfterTimestamp = :delete_after_timestamp +WHERE id = :id; + +selectOrderIdsToDelete: +SELECT orders.id +FROM orders +WHERE + (deleteAfterTimestamp < :current_timestamp AND deleteAfterTimestamp > 1) + AND (orders.id NOT IN ( + SELECT order_ref + FROM OrderPosition + WHERE OrderPosition.subevent_id = :sub_event_id + )) +LIMIT 200; + +countOrdersByIdList: +SELECT COUNT(*) +FROM orders +WHERE id IN :ids; + +deleteOrders: +DELETE FROM orders +WHERE id IN :ids; + +selectOldEventSlugs: +SELECT event_slug +FROM orders +WHERE event_slug NOT IN :keep_slugs +GROUP BY orders.event_slug +ORDER BY orders.event_slug; + +selectOrderIdsForOldEvent: +SELECT id +FROM orders +WHERE event_slug = ? +LIMIT 200; + +-- WITH deleted_rows AS ( +-- DELETE FROM MyTable RETURNING * +-- ) +-- SELECT COUNT(*) FROM deleted_rows; + +-- store.select(Order.ID) +-- .where(Order.DELETE_AFTER_TIMESTAMP.lt(System.currentTimeMillis()). +-- and(Order.DELETE_AFTER_TIMESTAMP.gt(1L))) +-- .and(Order.ID.notIn(store.select(OrderPosition.ORDER_ID). +-- from(OrderPosition::class.java).where(OrderPosition.SUBEVENT_ID.eq(subeventId)))). 
+-- limit(200).get().toList() diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/OrderPosition.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/OrderPosition.sq new file mode 100644 index 00000000..ba44fa6e --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/OrderPosition.sq @@ -0,0 +1,82 @@ +selectById: +SELECT * +FROM OrderPosition +WHERE id = ?; + +selectByIdList: +SELECT * +FROM OrderPosition +WHERE id IN ?; + +selectByServerId: +SELECT * +FROM OrderPosition +WHERE server_id = ?; + +selectForOrder: +SELECT * +FROM OrderPosition +WHERE order_ref = :order_id; + +selectBySecretAndEventSlugs: +SELECT OrderPosition.* +FROM OrderPosition +LEFT JOIN orders ON OrderPosition.order_ref = orders.id +WHERE OrderPosition.secret = :secret +AND orders.event_slug IN :event_slugs; + +selectByServerIdAndEventSlugs: +SELECT OrderPosition.* +FROM OrderPosition +LEFT JOIN orders ON OrderPosition.order_ref = orders.id +WHERE OrderPosition.server_id = :server_id +AND orders.event_slug IN :event_slugs; + +count: +SELECT COUNT(*) +FROM OrderPosition; + +deleteByServerIdList: +DELETE FROM OrderPosition +WHERE server_id IN ?; + +insert: +INSERT INTO OrderPosition ( + attendee_email, + attendee_name, + item, + json_data, + order_ref, + positionid, + secret, + server_id, + subevent_id, + variation_id +) +VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE OrderPosition +SET + attendee_email = ?, + attendee_name = ?, + item = ?, + json_data = ?, + order_ref = ?, + positionid = ?, + secret = ?, + server_id = ?, + subevent_id = ?, + variation_id = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Question.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Question.sq new file mode 100644 index 00000000..86ff3c5c --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Question.sq @@ -0,0 +1,73 @@ +selectByServerId: +SELECT * +FROM Question +WHERE server_id = ?; + +selectForItem: +SELECT Question.* +FROM Question +WHERE Question.id IN ( + SELECT Question_Item.QuestionId + FROM Question_Item + WHERE Question_Item.ItemId = :item_id +); + +selectByServerIdListAndEventSlug: +SELECT * +FROM Question +WHERE server_id IN ? AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM Question +WHERE event_slug = ?; + +deleteByServerId: +DELETE FROM Question +WHERE server_id = ?; + +insert: +INSERT INTO Question( + event_slug, + json_data, + "position", + required, + server_id +) VALUES( + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE Question +SET + event_slug = ?, + json_data = ?, + "position" = ?, + required = ? 
+WHERE id = ?; + +selectRelationsForQuestion: +SELECT * +FROM Question_Item +WHERE QuestionId = :question_id; + +insertItemRelation: +INSERT INTO Question_Item( + ItemId, + QuestionId +) VALUES( + :item_id, + :question_id +); + +deleteItemRelation: +DELETE FROM Question_Item +WHERE ItemId = :item_id AND QuestionId = :question_id; + +deleteItemRelationsForQuestion: +DELETE FROM Question_Item +WHERE QuestionId = :question_id; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/QueuedCall.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/QueuedCall.sq new file mode 100644 index 00000000..7065ace5 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/QueuedCall.sq @@ -0,0 +1,24 @@ +selectAll: +SELECT * +FROM QueuedCall; + +selectById: +SELECT * +FROM QueuedCall +WHERE id = ?; + +delete: +DELETE FROM QueuedCall +WHERE id = ?; + +insert: +INSERT INTO QueuedCall ( + body, + idempotency_key, + url +) +VALUES ( + ?, + ?, + ? 
+); diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/QueuedCheckIn.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/QueuedCheckIn.sq new file mode 100644 index 00000000..94bbba89 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/QueuedCheckIn.sq @@ -0,0 +1,50 @@ +selectAll: +SELECT * +FROM QueuedCheckIn; + +selectById: +SELECT * +FROM QueuedCheckIn +WHERE id = ?; + +selectBySecret: +SELECT * +FROM QueuedCheckIn +WHERE secret = ?; + +count: +SELECT COUNT(*) +FROM QueuedCheckIn; + +countForSecretAndLists: +SELECT COUNT(*) +FROM QueuedCheckIn +WHERE secret = :secret AND checkinListId IN :checkin_list_ids; + +delete: +DELETE FROM QueuedCheckIn +WHERE id = ?; + +insert: +INSERT INTO QueuedCheckIn ( + answers, + checkinListId, + datetime, + datetime_string, + event_slug, + nonce, + secret, + source_type, + type +) +VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ? +); diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/QueuedOrder.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/QueuedOrder.sq new file mode 100644 index 00000000..a50afd55 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/QueuedOrder.sq @@ -0,0 +1,38 @@ +selectById: +SELECT * +FROM QueuedOrder +WHERE id = ?; + +selectUnlockedWithoutError: +SELECT * +FROM QueuedOrder +WHERE CAST(locked AS INTEGER) = 0 AND error IS NULL; + +delete: +DELETE FROM QueuedOrder +WHERE id = ?; + +insert: +INSERT INTO QueuedOrder +( + error, + event_slug, + idempotency_key, + locked, + payload, + receipt +) +VALUES( + ?, + ?, + ?, + ?, + ?, + ? +); + +updateError: +UPDATE QueuedOrder +SET + error = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Quota.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Quota.sq new file mode 100644 index 00000000..73a5a2a2 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Quota.sq @@ -0,0 +1,80 @@ +selectById: +SELECT * +FROM Quota +WHERE id = ?; + +selectByServerId: +SELECT * +FROM Quota +WHERE server_id = ?; + +selectByServerIdListAndEventSlug: +SELECT * +FROM Quota +WHERE server_id IN ? AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM Quota +WHERE event_slug = ?; + +selectServerIdsByEventSlugAndSubEvent: +SELECT server_id +FROM Quota +WHERE event_slug = ? AND subevent_id = ?; + +deleteByServerId: +DELETE FROM Quota +WHERE server_id = ?; + +insert: +INSERT INTO Quota( + available, + available_number, + event_slug, + json_data, + server_id, + size, + subevent_id +) VALUES( + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE Quota +SET + available = ?, + available_number = ?, + event_slug = ?, + json_data = ?, + size = ?, + subevent_id = ? 
+WHERE id = ?; + +selectRelationsForQuota: +SELECT * +FROM Quota_Item +WHERE QuotaId = :quota_id; + +insertItemRelation: +INSERT INTO Quota_Item( + ItemId, + QuotaId +) VALUES( + :item_id, + :quota_id +); + +deleteItemRelation: +DELETE FROM Quota_Item +WHERE ItemId = :item_id AND QuotaId = :quota_id; + +deleteItemRelationsForQuota: +DELETE FROM Quota_Item +WHERE QuotaId = :quota_id; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Receipt.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Receipt.sq new file mode 100644 index 00000000..303d1c72 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Receipt.sq @@ -0,0 +1,64 @@ +selectById: +SELECT * +FROM Receipt +WHERE id = ?; + +insert: +INSERT INTO Receipt ( + id, + additional_text, + canceled, + cashier_name, + cashier_numericid, + cashier_userid, + chosen_cart_id, + closing, + currency, + datetime_closed, + datetime_opened, + email_to, + event_slug, + fiscalisation_data, + fiscalisation_qr, + fiscalisation_text, + open, + order_code, + payment_data, + payment_type, + printed, + server_id, + started, + training +) +VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +updateOrderCode: +UPDATE Receipt +SET + order_code = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ReceiptLine.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ReceiptLine.sq new file mode 100644 index 00000000..7767a499 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ReceiptLine.sq @@ -0,0 +1,101 @@ +selectById: +SELECT * +FROM ReceiptLine +WHERE id = ?; + +selectByIdList: +SELECT * +FROM ReceiptLine +WHERE id IN ?; + +insert: +INSERT INTO ReceiptLine ( + addon_to, + answers, + attendee_city, + attendee_company, + attendee_country, + attendee_email, + attendee_name, + attendee_street, + attendee_zipcode, + canceled, + canceled_because_of_receipt, + cart_expires, + cart_id, + cart_position_id, + created, + custom_price_input, + event_date_from, + event_date_to, + gift_card_id, + gift_card_secret, + is_bundled, + item_id, + listed_price, + positionid, + price, + price_after_voucher, + price_calculated_from_net, + receipt, + remote_error, + requested_valid_from, + sale_text, + seat_guid, + seat_name, + secret, + subevent_id, + subevent_text, + tax_rate, + tax_rule, + tax_value, + type, + use_reusable_medium, + variation_id, + voucher_code +) +VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ? 
+); diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ReceiptPayment.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ReceiptPayment.sq new file mode 100644 index 00000000..cd6e99db --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ReceiptPayment.sq @@ -0,0 +1,19 @@ +selectById: +SELECT * +FROM ReceiptPayment +WHERE id = ?; + +insert: +INSERT INTO ReceiptPayment ( + amount, + detailsJson, + payment_type, + receipt, + status +) VALUES ( + ?, + ?, + ?, + ?, + ? +); diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ResourceSyncStatus.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ResourceSyncStatus.sq new file mode 100644 index 00000000..3b1b5589 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ResourceSyncStatus.sq @@ -0,0 +1,58 @@ +selectByResource: +SELECT * +FROM ResourceSyncStatus +WHERE resource = ?; + +selectByResourceAndEventSlug: +SELECT * +FROM ResourceSyncStatus +WHERE resource = ? AND event_slug = ? +LIMIT 1; + +deleteById: +DELETE FROM ResourceSyncStatus +WHERE id = ?; + +deleteByResourceFilter: +DELETE FROM ResourceSyncStatus +WHERE resource LIKE :filter; + +deleteByResourceFilterAndEventSlug: +DELETE FROM ResourceSyncStatus +WHERE resource LIKE :filter AND event_slug = :event_slug; + +insert: +INSERT INTO ResourceSyncStatus +( + event_slug, + last_modified, + meta, + resource, + status +) +VALUES ( + ?, + ?, + ?, + ?, + ? +); + +updateLastModified: +UPDATE ResourceSyncStatus +SET + last_modified = ? +WHERE id = ?; + +updateLastModifiedAndMeta: +UPDATE ResourceSyncStatus +SET + last_modified = ?, + meta = ? +WHERE id = ?; + +updateStatus: +UPDATE ResourceSyncStatus +SET + status = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ReusableMedium.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ReusableMedium.sq new file mode 100644 index 00000000..f5214ac1 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/ReusableMedium.sq @@ -0,0 +1,62 @@ +selectByServerId: +SELECT * +FROM ReusableMedium +WHERE server_id = ?; + +selectServerIds: +SELECT server_id +FROM ReusableMedium; + +selectByServerIdList: +SELECT * +FROM ReusableMedium +WHERE server_id IN ?; + +selectForCheck: +SELECT ReusableMedium.* +FROM ReusableMedium +LEFT JOIN OrderPosition ON ReusableMedium.linked_orderposition_id = OrderPosition.server_id +LEFT JOIN orders ON OrderPosition.order_ref = orders.id +WHERE ReusableMedium.identifier = :identifier +AND ReusableMedium.type = :type +AND orders.event_slug IN :event_slugs; + +deleteByServerId: +DELETE FROM ReusableMedium +WHERE server_id = ?; + +insert: +INSERT INTO ReusableMedium( + active, + customer_id, + expires, + identifier, + json_data, + linked_giftcard_id, + linked_orderposition_id, + server_id, + type +) VALUES( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE ReusableMedium +SET + active = ?, + customer_id = ?, + expires = ?, + identifier = ?, + json_data = ?, + linked_giftcard_id = ?, + linked_orderposition_id = ?, + type = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/RevokedTicketSecret.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/RevokedTicketSecret.sq new file mode 100644 index 00000000..673b4c6b --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/RevokedTicketSecret.sq @@ -0,0 +1,47 @@ +selectByServerId: +SELECT * +FROM RevokedTicketSecret +WHERE server_id = ?; + +selectByServerIdListAndEventSlug: +SELECT * +FROM RevokedTicketSecret +WHERE server_id IN ? AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM RevokedTicketSecret +WHERE event_slug = ?; + +countForSecret: +SELECT COUNT(*) +FROM RevokedTicketSecret +WHERE secret = ?; + +deleteByServerId: +DELETE FROM RevokedTicketSecret +WHERE server_id = ?; + +insert: +INSERT INTO RevokedTicketSecret( + created, + event_slug, + json_data, + secret, + server_id +) VALUES( + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE RevokedTicketSecret +SET + created = ?, + event_slug = ?, + json_data = ?, + secret = ? +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Settings.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Settings.sq new file mode 100644 index 00000000..f1239649 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/Settings.sq @@ -0,0 +1,52 @@ +selectById: +SELECT * +FROM Settings +WHERE id = ?; + +selectBySlug: +SELECT * +FROM Settings +WHERE slug = ?; + +deleteByEventSlug: +DELETE FROM Settings +WHERE slug = ?; + +insert: +INSERT INTO Settings ( + address, + city, + country, + json_data, + name, + pretixpos_additional_receipt_text, + slug, + tax_id, + vat_id, + zipcode +) VALUES( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ? 
+); + +updateFromJson: +UPDATE Settings +SET + address = ?, + city = ?, + country = ?, + json_data = ?, + name = ?, + pretixpos_additional_receipt_text = ?, + tax_id = ?, + vat_id = ?, + zipcode = ? +WHERE slug = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/SubEvent.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/SubEvent.sq new file mode 100644 index 00000000..0efd32c0 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/SubEvent.sq @@ -0,0 +1,54 @@ +selectById: +SELECT * +FROM SubEvent +WHERE id = ?; + +selectServerIds: +SELECT server_id +FROM SubEvent; + +selectByServerId: +SELECT * +FROM SubEvent +WHERE server_id = ?; + +selectByServerIdList: +SELECT * +FROM SubEvent +WHERE server_id IN ?; + +selectByServerIdAndSlug: +SELECT * +FROM SubEvent +WHERE server_id = ? AND event_slug = ?; + +deleteByServerId: +DELETE FROM SubEvent +WHERE server_id = ?; + +insert: +INSERT INTO SubEvent ( + active, + date_from, + date_to, + event_slug, + json_data, + server_id +) VALUES ( + ?, + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE SubEvent +SET + active = ?, + date_from = ?, + date_to = ?, + event_slug = ?, + json_data = ? +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/TaxRule.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/TaxRule.sq new file mode 100644 index 00000000..9e66beb6 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/TaxRule.sq @@ -0,0 +1,41 @@ +selectById: +SELECT * +FROM TaxRule +WHERE id = ?; + +selectByServerId: +SELECT * +FROM TaxRule +WHERE server_id = ?; + +selectByServerIdListAndEventSlug: +SELECT * +FROM TaxRule +WHERE server_id IN ? 
AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM TaxRule +WHERE event_slug = ?; + +deleteByServerId: +DELETE FROM TaxRule +WHERE server_id = ?; + +insert: +INSERT INTO TaxRule( + event_slug, + json_data, + server_id +) VALUES ( + ?, + ?, + ? +); + +updateFromJson: +UPDATE TaxRule +SET + event_slug = ?, + json_data = ? +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/TicketLayout.sq b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/TicketLayout.sq new file mode 100644 index 00000000..6c844314 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/common/eu/pretix/libpretixsync/sqldelight/TicketLayout.sq @@ -0,0 +1,49 @@ +selectByServerId: +SELECT * +FROM TicketLayout +WHERE server_id = ?; + +selectByServerIdListAndEventSlug: +SELECT * +FROM TicketLayout +WHERE server_id IN ? AND event_slug = ?; + +selectServerIdsByEventSlug: +SELECT server_id +FROM TicketLayout +WHERE event_slug = ?; + +selectDefaultForEventSlug: +SELECT * +FROM TicketLayout +WHERE + CAST(is_default AS INTEGER) = 1 + AND event_slug = ?; + +deleteByServerId: +DELETE FROM TicketLayout +WHERE server_id = ?; + +insert: +INSERT INTO TicketLayout( + background_filename, + event_slug, + is_default, + json_data, + server_id +) VALUES( + ?, + ?, + ?, + ?, + ? +); + +updateFromJson: +UPDATE TicketLayout +SET + background_filename = ?, + event_slug = ?, + is_default = ?, + json_data = ? 
+WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/migrations/104.sqm b/libpretixsync/src/main/sqldelight/migrations/104.sqm new file mode 100644 index 00000000..6bbfe35e --- /dev/null +++ b/libpretixsync/src/main/sqldelight/migrations/104.sqm @@ -0,0 +1 @@ +-- Empty migration to set database version to 105 diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/BadgeLayout.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/BadgeLayout.sq new file mode 100644 index 00000000..2f1f2722 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/BadgeLayout.sq @@ -0,0 +1,10 @@ +import kotlin.Long; + +CREATE TABLE BadgeLayout ( + id serial AS Long PRIMARY KEY NOT NULL, + background_filename character varying(255), + event_slug character varying(255), + is_default boolean NOT NULL, + json_data text, + server_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/BadgeLayoutItem.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/BadgeLayoutItem.sq new file mode 100644 index 00000000..ad9ee021 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/BadgeLayoutItem.sq @@ -0,0 +1,9 @@ +import kotlin.Long; + +CREATE TABLE BadgeLayoutItem ( + id serial AS Long PRIMARY KEY NOT NULL, + json_data text, + server_id bigint, + item bigint REFERENCES Item (id) ON DELETE CASCADE, + layout bigint REFERENCES BadgeLayout (id) ON DELETE CASCADE +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/BlockedTicketSecret.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/BlockedTicketSecret.sq new file mode 100644 index 00000000..9b4f9996 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/BlockedTicketSecret.sq @@ -0,0 +1,11 @@ +import 
kotlin.Long; + +CREATE TABLE BlockedTicketSecret ( + id serial AS Long PRIMARY KEY NOT NULL, + blocked boolean NOT NULL, + event_slug character varying(255), + json_data text, + secret character varying(255), + server_id bigint, + updated character varying(255) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CachedPdfImage.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CachedPdfImage.sq new file mode 100644 index 00000000..e696356d --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CachedPdfImage.sq @@ -0,0 +1,8 @@ +import kotlin.Long; + +CREATE TABLE CachedPdfImage ( + id serial AS Long PRIMARY KEY NOT NULL, + etag character varying(255), + key character varying(255), + orderposition_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Cashier.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Cashier.sq new file mode 100644 index 00000000..73145a2f --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Cashier.sq @@ -0,0 +1,11 @@ +import kotlin.Long; + +CREATE TABLE Cashier ( + id serial AS Long PRIMARY KEY, + active boolean DEFAULT FALSE NOT NULL, + json_data text, + name character varying(255), + pin character varying(255), + server_id bigint, + userid character varying(255) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CheckIn.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CheckIn.sq new file mode 100644 index 00000000..0577ec4b --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CheckIn.sq @@ -0,0 +1,12 @@ +import java.util.Date; +import kotlin.Long; + +CREATE TABLE CheckIn ( + id serial AS Long PRIMARY KEY NOT NULL, + datetime DATE AS Date, + json_data text, + listId bigint, + 
server_id bigint, + type character varying(255), + "position" bigint REFERENCES OrderPosition (id) ON DELETE CASCADE +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CheckInList.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CheckInList.sq new file mode 100644 index 00000000..0fcb1c06 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CheckInList.sq @@ -0,0 +1,12 @@ +import kotlin.Long; + +CREATE TABLE CheckInList ( + id serial AS Long PRIMARY KEY NOT NULL, + all_items boolean NOT NULL, + event_slug character varying(255), + include_pending boolean NOT NULL, + json_data text, + name character varying(255), + server_id bigint, + subevent_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CheckInList_Item.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CheckInList_Item.sq new file mode 100644 index 00000000..bf43e50d --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/CheckInList_Item.sq @@ -0,0 +1,5 @@ +CREATE TABLE CheckInList_Item ( + ItemId bigint NOT NULL REFERENCES Item(id) ON DELETE CASCADE, + CheckInListId bigint NOT NULL REFERENCES CheckInList(id) ON DELETE CASCADE, + CONSTRAINT checkinlist_item_pkey PRIMARY KEY (ItemId, CheckInListId) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Closing.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Closing.sq new file mode 100644 index 00000000..bc9d3d55 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Closing.sq @@ -0,0 +1,21 @@ +import java.math.BigDecimal; +import java.util.Date; +import kotlin.Long; + +CREATE TABLE Closing ( + id serial AS Long PRIMARY KEY NOT NULL, + cash_counted numeric AS BigDecimal, + cashier_name character 
varying(255), + cashier_numericid bigint, + cashier_userid character varying(255), + datetime DATE AS Date, + dsfinvk_uploaded boolean, + first_receipt bigint, + invoice_settings character varying(255), + json_data character varying(255), + last_receipt bigint, + open boolean NOT NULL, + payment_sum numeric AS BigDecimal, + payment_sum_cash numeric AS BigDecimal, + server_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Event.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Event.sq new file mode 100644 index 00000000..b1479177 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Event.sq @@ -0,0 +1,13 @@ +import java.util.Date; +import kotlin.Long; + +CREATE TABLE Event ( + id serial AS Long PRIMARY KEY NOT NULL, + currency character varying(255), + date_from DATE AS Date, + date_to DATE AS Date, + has_subevents boolean NOT NULL, + json_data text, + live boolean NOT NULL, + slug character varying(255) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Item.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Item.sq new file mode 100644 index 00000000..0e3d512f --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Item.sq @@ -0,0 +1,16 @@ +import kotlin.Long; + +CREATE TABLE Item ( + id serial AS Long PRIMARY KEY, + active boolean NOT NULL, + admission boolean NOT NULL, + category_id bigint, + checkin_text text, + event_slug character varying(255), + json_data text, + picture_filename character varying(255), + "position" bigint, + server_id bigint NOT NULL, + ticket_layout_id bigint, + ticket_layout_pretixpos_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ItemCategory.sq 
b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ItemCategory.sq new file mode 100644 index 00000000..d9e39e6d --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ItemCategory.sq @@ -0,0 +1,10 @@ +import kotlin.Long; + +CREATE TABLE ItemCategory ( + id serial AS Long PRIMARY KEY NOT NULL, + event_slug character varying(255), + is_addon boolean NOT NULL, + json_data text, + "position" bigint, + server_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/MediumKeySet.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/MediumKeySet.sq new file mode 100644 index 00000000..b1215ef8 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/MediumKeySet.sq @@ -0,0 +1,12 @@ +import kotlin.Long; + +CREATE TABLE MediumKeySet ( + id serial AS Long PRIMARY KEY NOT NULL, + active boolean NOT NULL, + diversification_key character varying(255), + json_data text, + media_type character varying(255), + organizer character varying(255), + public_id bigint, + uid_key character varying(255) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Order.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Order.sq new file mode 100644 index 00000000..829c942d --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Order.sq @@ -0,0 +1,14 @@ +import kotlin.Long; + +CREATE TABLE orders ( + id serial AS Long PRIMARY KEY NOT NULL, + checkin_attention boolean NOT NULL, + checkin_text text, + code character varying(255), + deleteAfterTimestamp bigint, + email character varying(255), + event_slug character varying(255), + json_data text, + status character varying(255), + valid_if_pending boolean DEFAULT FALSE +); diff --git 
a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/OrderPosition.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/OrderPosition.sq new file mode 100644 index 00000000..5da46222 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/OrderPosition.sq @@ -0,0 +1,15 @@ +import kotlin.Long; + +CREATE TABLE OrderPosition ( + id serial AS Long PRIMARY KEY NOT NULL, + attendee_email character varying(255), + attendee_name character varying(255), + json_data text, + positionid bigint, + secret character varying(255), + server_id bigint, + subevent_id bigint, + variation_id bigint, + item bigint REFERENCES Item (id) ON DELETE CASCADE, + order_ref bigint REFERENCES orders (id) ON DELETE CASCADE +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Question.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Question.sq new file mode 100644 index 00000000..91effe69 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Question.sq @@ -0,0 +1,10 @@ +import kotlin.Long; + +CREATE TABLE Question ( + id serial AS Long PRIMARY KEY NOT NULL, + event_slug character varying(255), + json_data text, + "position" bigint, + required boolean NOT NULL, + server_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QuestionItem.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QuestionItem.sq new file mode 100644 index 00000000..0fe1f943 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QuestionItem.sq @@ -0,0 +1,5 @@ +CREATE TABLE Question_Item ( + ItemId bigint NOT NULL REFERENCES Item (id) ON DELETE CASCADE, + QuestionId bigint NOT NULL REFERENCES Question (id) ON DELETE CASCADE, + PRIMARY KEY (ItemId, QuestionId) +); diff --git 
a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QueuedCall.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QueuedCall.sq new file mode 100644 index 00000000..a7e7d6b6 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QueuedCall.sq @@ -0,0 +1,8 @@ +import kotlin.Long; + +CREATE TABLE QueuedCall ( + id serial AS Long PRIMARY KEY NOT NULL, + body text, + idempotency_key character varying(255), + url text +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QueuedCheckIn.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QueuedCheckIn.sq new file mode 100644 index 00000000..2d664aac --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QueuedCheckIn.sq @@ -0,0 +1,15 @@ +import java.util.Date; +import kotlin.Long; + +CREATE TABLE QueuedCheckIn ( + id serial AS Long PRIMARY KEY NOT NULL, + answers character varying(255), + checkinListId bigint, + datetime DATE AS Date, + datetime_string character varying(255), + event_slug character varying(255), + nonce character varying(255), + secret character varying(255), + source_type character varying(255), + type character varying(255) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QueuedOrder.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QueuedOrder.sq new file mode 100644 index 00000000..961c8eed --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QueuedOrder.sq @@ -0,0 +1,11 @@ +import kotlin.Long; + +CREATE TABLE QueuedOrder ( + id serial AS Long PRIMARY KEY NOT NULL, + error character varying(255), + event_slug character varying(255), + idempotency_key character varying(255), + locked boolean DEFAULT FALSE, + payload character varying(255), + receipt bigint REFERENCES 
Receipt (id) ON DELETE CASCADE +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Quota.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Quota.sq new file mode 100644 index 00000000..71ac167e --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Quota.sq @@ -0,0 +1,13 @@ +import kotlin.Boolean; +import kotlin.Long; + +CREATE TABLE Quota ( + id serial AS Long PRIMARY KEY NOT NULL, + available bigint AS Boolean, + available_number bigint, + event_slug character varying(255), + json_data text, + server_id bigint, + size bigint, + subevent_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QuotaItem.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QuotaItem.sq new file mode 100644 index 00000000..b7b3e3cf --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/QuotaItem.sq @@ -0,0 +1,5 @@ +CREATE TABLE Quota_Item ( + QuotaId bigint NOT NULL REFERENCES Quota (id) ON DELETE CASCADE, + ItemId bigint NOT NULL REFERENCES Item (id) ON DELETE CASCADE, + PRIMARY KEY (QuotaId, ItemId) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Receipt.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Receipt.sq new file mode 100644 index 00000000..3346a61a --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Receipt.sq @@ -0,0 +1,29 @@ +import java.util.Date; +import kotlin.Long; + +CREATE TABLE Receipt ( + id serial AS Long PRIMARY KEY, + additional_text character varying(255), + canceled boolean NOT NULL, + cashier_name character varying(255), + cashier_numericid bigint, + cashier_userid character varying(255), + chosen_cart_id character varying(255), + currency character varying(255) DEFAULT 'EUR' NOT NULL, + 
datetime_closed DATE AS Date, + datetime_opened DATE AS Date, + email_to character varying(255), + event_slug character varying(255), + fiscalisation_data character varying(255), + fiscalisation_qr character varying(255), + fiscalisation_text character varying(255), + open boolean DEFAULT FALSE, + order_code character varying(255), + payment_data character varying(255), + payment_type character varying(255), + printed boolean DEFAULT FALSE NOT NULL, + server_id bigint, + started boolean DEFAULT FALSE, + training boolean DEFAULT FALSE NOT NULL, + closing bigint REFERENCES Closing (id) ON DELETE CASCADE +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ReceiptLine.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ReceiptLine.sq new file mode 100644 index 00000000..d643aa29 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ReceiptLine.sq @@ -0,0 +1,54 @@ +import java.util.Date; +import kotlin.Long; + +CREATE TABLE ReceiptLine ( + id serial AS Long PRIMARY KEY, + answers character varying(255), + attendee_city character varying(255), + attendee_company character varying(255), + attendee_country character varying(255), + attendee_email character varying(255), + attendee_name character varying(255), + attendee_street character varying(255), + attendee_zipcode character varying(255), + canceled boolean NOT NULL, + canceled_because_of_receipt boolean DEFAULT FALSE, + cart_expires DATE AS Date, + cart_id character varying(255), + cart_position_id bigint, + created DATE AS Date, + custom_price_input numeric, + event_date_from character varying(255), + event_date_to character varying(255), + gift_card_id bigint, + gift_card_secret character varying(255), + is_bundled boolean DEFAULT FALSE, + item_id bigint, + listed_price numeric, + positionid bigint, + price numeric, + price_after_voucher numeric, + price_calculated_from_net boolean DEFAULT FALSE, + 
remote_error character varying(255), + requested_valid_from character varying(255), + sale_text character varying(255), + seat_guid character varying(255), + seat_name character varying(255), + secret character varying(255), + subevent_id bigint, + subevent_text character varying(255), + tax_rate numeric, + tax_rule bigint, + tax_value numeric, + type character varying(255), + use_reusable_medium bigint, + variation_id bigint, + voucher_code character varying(255), + receipt bigint REFERENCES Receipt (id) ON DELETE CASCADE, + + -- SQLDelight does not support self-referential foreign key constraints on columns. + -- Creating the constraint on a table-level skips the dependency check for table creation order, + -- but the check is not relevant in this case anyway. + addon_to bigint, + FOREIGN KEY (addon_to) REFERENCES ReceiptLine (id) ON DELETE CASCADE +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ReceiptPayment.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ReceiptPayment.sq new file mode 100644 index 00000000..3224e969 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ReceiptPayment.sq @@ -0,0 +1,10 @@ +import kotlin.Long; + +CREATE TABLE ReceiptPayment ( + id serial AS Long PRIMARY KEY NOT NULL, + amount numeric, + detailsJson character varying(255), + payment_type character varying(255), + status character varying(255), + receipt bigint REFERENCES Receipt (id) ON DELETE CASCADE +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ResourceSyncStatus.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ResourceSyncStatus.sq new file mode 100644 index 00000000..f55414a2 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ResourceSyncStatus.sq @@ -0,0 +1,10 @@ +import kotlin.Long; + +CREATE TABLE ResourceSyncStatus ( + 
id serial AS Long PRIMARY KEY, + event_slug character varying(255), + last_modified character varying(255), + meta character varying(255), + resource character varying(255), + status character varying(255) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ReusableMedium.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ReusableMedium.sq new file mode 100644 index 00000000..62a5c88f --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/ReusableMedium.sq @@ -0,0 +1,14 @@ +import kotlin.Long; + +CREATE TABLE ReusableMedium ( + id serial AS Long PRIMARY KEY NOT NULL, + active boolean NOT NULL, + customer_id bigint, + expires character varying(255), + identifier character varying(255), + json_data text, + linked_giftcard_id bigint, + linked_orderposition_id bigint, + server_id bigint, + type character varying(255) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/RevokedTicketSecret.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/RevokedTicketSecret.sq new file mode 100644 index 00000000..daf3fc53 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/RevokedTicketSecret.sq @@ -0,0 +1,10 @@ +import kotlin.Long; + +CREATE TABLE RevokedTicketSecret ( + id serial AS Long PRIMARY KEY NOT NULL, + created character varying(255), + event_slug character varying(255), + json_data text, + secret character varying(255), + server_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Settings.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Settings.sq new file mode 100644 index 00000000..7ba8ee61 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/Settings.sq @@ -0,0 +1,15 @@ +import kotlin.Long; + +CREATE TABLE 
Settings ( + id serial AS Long PRIMARY KEY, + address text, + city character varying(255), + country character varying(255), + json_data text, + name character varying(255), + pretixpos_additional_receipt_text text, + slug character varying(255), + tax_id character varying(255), + vat_id character varying(255), + zipcode character varying(255) +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/SubEvent.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/SubEvent.sq new file mode 100644 index 00000000..62d3b901 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/SubEvent.sq @@ -0,0 +1,12 @@ +import java.util.Date; +import kotlin.Long; + +CREATE TABLE SubEvent ( + id serial AS Long PRIMARY KEY NOT NULL, + active boolean NOT NULL, + date_from DATE AS Date, + date_to DATE AS Date, + event_slug character varying(255), + json_data text, + server_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/TaxRule.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/TaxRule.sq new file mode 100644 index 00000000..e226063a --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/TaxRule.sq @@ -0,0 +1,8 @@ +import kotlin.Long; + +CREATE TABLE TaxRule ( + id serial AS Long PRIMARY KEY NOT NULL, + event_slug character varying(255), + json_data text, + server_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/TicketLayout.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/TicketLayout.sq new file mode 100644 index 00000000..468237b6 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/TicketLayout.sq @@ -0,0 +1,10 @@ +import kotlin.Long; + +CREATE TABLE TicketLayout ( + id serial AS Long PRIMARY KEY NOT NULL, + 
background_filename character varying(255), + event_slug character varying(255), + is_default boolean NOT NULL, + json_data text, + server_id bigint +); diff --git a/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/compat.sq b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/compat.sq new file mode 100644 index 00000000..89996648 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/postgres/eu/pretix/libpretixsync/sqldelight/compat.sq @@ -0,0 +1,125 @@ +-- Dialect-specific queries +-- Cannot be part of the table-specific .sq files, as they would generate conflicting query classes + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedOrderId: +SELECT currval('orders_id_seq') AS Long; + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedCheckInId: +SELECT currval('checkin_id_seq') AS Long; + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedCheckInListId: +SELECT currval('checkinlist_id_seq') AS Long; + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedOrderPositionId: +SELECT currval('orderposition_id_seq') AS Long; + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedQuestionId: +SELECT currval('question_id_seq') AS Long; + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedQuotaId: +SELECT currval('quota_id_seq') AS Long; + +truncateCheckIn: +TRUNCATE CheckIn; + +truncateOrderPosition: +TRUNCATE OrderPosition; + +truncateOrder: +TRUNCATE orders; + +truncateResourceSyncStatus: +TRUNCATE ResourceSyncStatus; + +truncateReusableMedium: +TRUNCATE ReusableMedium; + +truncateAllTables: +TRUNCATE + BadgeLayout, + BadgeLayoutItem, + BlockedTicketSecret, + CachedPdfImage, + Cashier, + CheckIn, + CheckInList, + CheckInList_Item, + Closing, + Event, + Item, + ItemCategory, + MediumKeySet, + orders, + OrderPosition, + Question, + Question_Item, + QueuedCall, + 
QueuedCheckIn, + QueuedOrder, + Quota, + Quota_Item, + Receipt, + ReceiptLine, + ReceiptPayment, + ResourceSyncStatus, + ReusableMedium, + RevokedTicketSecret, + Settings, + SubEvent, + TaxRule, + TicketLayout; + +searchOrderPosition: +SELECT OrderPosition.* +FROM OrderPosition +LEFT JOIN orders ON OrderPosition.order_ref = orders.id +LEFT JOIN Item ON OrderPosition.item = Item.id +WHERE ( + UPPER(OrderPosition.secret) LIKE :queryStartsWith + OR UPPER(OrderPosition.attendee_name) LIKE :queryContains + OR UPPER(OrderPosition.attendee_email) LIKE :queryContains + OR UPPER(orders.email) LIKE :queryContains + OR UPPER(orders.code) LIKE :queryStartsWith +) +AND ( + CASE WHEN (:use_event_filter) THEN (orders.event_slug IN :event_filter) ELSE FALSE END + OR CASE WHEN (:use_event_item_filter) THEN (orders.event_slug IN :event_item_filter_events AND OrderPosition.item IN :event_item_filter_items) ELSE FALSE END + OR CASE WHEN (:use_event_subevent_filter) THEN (orders.event_slug IN :event_subevent_filter_events AND OrderPosition.subevent_id IN :event_subevent_filter_subevents) ELSE FALSE END + OR CASE WHEN (:use_all_filter) THEN (orders.event_slug IN :all_filter_events AND OrderPosition.item IN :all_filter_items AND OrderPosition.subevent_id IN :all_filter_subevents) ELSE FALSE END +) +LIMIT :limit +OFFSET :offset; + +countOrderPositionForStatus: +-- TODO: Check if DISTINCT is required here +SELECT COUNT(DISTINCT OrderPosition.id) +FROM OrderPosition +LEFT JOIN orders ON OrderPosition.order_ref = orders.id +WHERE( + orders.event_slug = :event_slug + AND CASE WHEN (:include_pending) + THEN orders.status IN ('p', 'n') + ELSE (orders.status = 'p' OR (orders.status = 'n' AND orders.valid_if_pending = TRUE)) + END + AND CASE WHEN (:subevent_id > 0) THEN OrderPosition.subevent_id = :subevent_id ELSE TRUE END + AND CASE WHEN (:not_all_items) THEN OrderPosition.item IN :list_item_ids ELSE TRUE END + AND CASE WHEN (:only_checked_in_list_server_id > 0) THEN + (OrderPosition.id IN 
(SELECT position FROM CheckIn WHERE listId = :only_checked_in_list_server_id AND type = 'entry')) + ELSE + TRUE + END + AND OrderPosition.item = :item_id + AND CASE WHEN (:variation_id > 0) THEN OrderPosition.variation_id = :variation_id ELSE TRUE END +); + +lockQueuedOrder: +UPDATE QueuedOrder +SET + locked = TRUE +WHERE id = ?; diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/BadgeLayout.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/BadgeLayout.sq new file mode 100644 index 00000000..05a37ef5 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/BadgeLayout.sq @@ -0,0 +1,10 @@ +import kotlin.Boolean; + +CREATE TABLE BadgeLayout ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + background_filename TEXT, + event_slug TEXT , + is_default INTEGER AS Boolean NOT NULL, + json_data TEXT, + server_id integer +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/BadgeLayoutItem.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/BadgeLayoutItem.sq new file mode 100644 index 00000000..f850c502 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/BadgeLayoutItem.sq @@ -0,0 +1,7 @@ +CREATE TABLE BadgeLayoutItem ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + item INTEGER REFERENCES Item (id) ON DELETE CASCADE, + json_data TEXT, + layout INTEGER REFERENCES BadgeLayout (id) ON DELETE CASCADE, + server_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/BlockedTicketSecret.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/BlockedTicketSecret.sq new file mode 100644 index 00000000..5c3d8e5f --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/BlockedTicketSecret.sq @@ -0,0 +1,11 @@ +import kotlin.Boolean; + +CREATE TABLE BlockedTicketSecret 
( + id INTEGER PRIMARY KEY AUTOINCREMENT, + blocked INTEGER AS Boolean NOT NULL, + event_slug TEXT, + json_data TEXT, + secret TEXT, + server_id INTEGER, + updated TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CachedPdfImage.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CachedPdfImage.sq new file mode 100644 index 00000000..66f92d10 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CachedPdfImage.sq @@ -0,0 +1,6 @@ +CREATE TABLE CachedPdfImage ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + etag TEXT, + key TEXT, + orderposition_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Cashier.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Cashier.sq new file mode 100644 index 00000000..d94f261f --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Cashier.sq @@ -0,0 +1,11 @@ +import kotlin.Boolean; + +CREATE TABLE Cashier ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + active INTEGER AS Boolean DEFAULT 0 NOT NULL, + json_data TEXT, + name TEXT, + pin TEXT, + server_id INTEGER NOT NULL, + userid TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CheckIn.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CheckIn.sq new file mode 100644 index 00000000..edfa2d5e --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CheckIn.sq @@ -0,0 +1,11 @@ +import java.util.Date; + +CREATE TABLE CheckIn ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + datetime TEXT AS Date, + json_data TEXT, + listId INTEGER, + position INTEGER REFERENCES OrderPosition (id) ON DELETE CASCADE, + server_id INTEGER, + type TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CheckInList.sq 
b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CheckInList.sq new file mode 100644 index 00000000..b09e0059 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CheckInList.sq @@ -0,0 +1,12 @@ +import kotlin.Boolean; + +CREATE TABLE CheckInList ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + all_items INTEGER AS Boolean NOT NULL, + event_slug TEXT, + include_pending INTEGER AS Boolean NOT NULL, + json_data TEXT, + name TEXT, + server_id INTEGER, + subevent_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CheckInList_Item.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CheckInList_Item.sq new file mode 100644 index 00000000..ec547515 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/CheckInList_Item.sq @@ -0,0 +1,5 @@ +CREATE TABLE CheckInList_Item ( + ItemId INTEGER REFERENCES Item (id) ON DELETE CASCADE, + CheckInListId INTEGER REFERENCES CheckInList (id) ON DELETE CASCADE, + PRIMARY KEY (ItemId, CheckInListId) +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Closing.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Closing.sq new file mode 100644 index 00000000..debeeb0b --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Closing.sq @@ -0,0 +1,21 @@ +import java.math.BigDecimal; +import java.util.Date; +import kotlin.Boolean; + +CREATE TABLE Closing ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + cash_counted REAL AS BigDecimal, + cashier_name TEXT, + cashier_numericid INTEGER, + cashier_userid TEXT, + datetime TEXT AS Date, + dsfinvk_uploaded INTEGER AS Boolean, + first_receipt INTEGER, + invoice_settings TEXT, + json_data TEXT, + last_receipt INTEGER, + open INTEGER AS Boolean NOT NULL, + payment_sum REAL AS BigDecimal, + payment_sum_cash REAL AS 
BigDecimal, + server_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Event.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Event.sq new file mode 100644 index 00000000..f24b5e59 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Event.sq @@ -0,0 +1,13 @@ +import java.util.Date; +import kotlin.Boolean; + +CREATE TABLE Event ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + currency TEXT, + date_from TEXT AS Date, + date_to TEXT AS Date, + has_subevents INTEGER AS Boolean NOT NULL, + json_data TEXT, + live INTEGER AS Boolean NOT NULL, + slug TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Item.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Item.sq new file mode 100644 index 00000000..db346501 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Item.sq @@ -0,0 +1,16 @@ +import kotlin.Boolean; + +CREATE TABLE Item ( + id INTEGER PRIMARY KEY AUTOINCREMENT , + active INTEGER AS Boolean NOT NULL, + admission INTEGER AS Boolean NOT NULL, + category_id INTEGER, + checkin_text TEXT, + event_slug TEXT, + json_data TEXT, + picture_filename TEXT, + position INTEGER, + server_id INTEGER NOT NULL, + ticket_layout_id INTEGER, + ticket_layout_pretixpos_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ItemCategory.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ItemCategory.sq new file mode 100644 index 00000000..98c86fe6 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ItemCategory.sq @@ -0,0 +1,10 @@ +import kotlin.Boolean; + +CREATE TABLE ItemCategory ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + event_slug TEXT, + is_addon INTEGER AS Boolean NOT NULL, + json_data TEXT, + position INTEGER, + 
server_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/MediumKeySet.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/MediumKeySet.sq new file mode 100644 index 00000000..dbe786f7 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/MediumKeySet.sq @@ -0,0 +1,12 @@ +import kotlin.Boolean; + +CREATE TABLE MediumKeySet ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + active INTEGER AS Boolean NOT NULL, + diversification_key TEXT, + json_data TEXT, + media_type TEXT, + organizer TEXT, + public_id INTEGER, + uid_key TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Order.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Order.sq new file mode 100644 index 00000000..f9fc1609 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Order.sq @@ -0,0 +1,14 @@ +import kotlin.Boolean; + +CREATE TABLE orders ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + checkin_attention INTEGER AS Boolean NOT NULL, + checkin_text TEXT, + code TEXT, + deleteAfterTimestamp INTEGER, + email TEXT, + event_slug TEXT , + json_data TEXT, + status TEXT, + valid_if_pending INTEGER AS Boolean DEFAULT 0 +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/OrderPosition.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/OrderPosition.sq new file mode 100644 index 00000000..e36a0c7e --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/OrderPosition.sq @@ -0,0 +1,13 @@ +CREATE TABLE OrderPosition ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + attendee_email TEXT, + attendee_name TEXT, + item INTEGER REFERENCES Item (id) ON DELETE CASCADE, + json_data TEXT, + order_ref INTEGER REFERENCES orders (id) ON DELETE CASCADE, + positionid INTEGER, + secret TEXT, + 
server_id INTEGER, + subevent_id INTEGER, + variation_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Question.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Question.sq new file mode 100644 index 00000000..0312ab65 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Question.sq @@ -0,0 +1,10 @@ +import kotlin.Boolean; + +CREATE TABLE Question ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + event_slug TEXT, + json_data TEXT, + position INTEGER, + required INTEGER AS Boolean NOT NULL, + server_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QuestionItem.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QuestionItem.sq new file mode 100644 index 00000000..79a6469f --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QuestionItem.sq @@ -0,0 +1,5 @@ +CREATE TABLE Question_Item ( + ItemId INTEGER REFERENCES Item (id) ON DELETE CASCADE, + QuestionId INTEGER REFERENCES Question (id) ON DELETE CASCADE, + PRIMARY KEY (ItemId, QuestionId) +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QueuedCall.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QueuedCall.sq new file mode 100644 index 00000000..16742c3a --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QueuedCall.sq @@ -0,0 +1,6 @@ +CREATE TABLE QueuedCall ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + body TEXT, + idempotency_key TEXT, + url TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QueuedCheckIn.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QueuedCheckIn.sq new file mode 100644 index 00000000..0735cd30 --- /dev/null +++ 
b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QueuedCheckIn.sq @@ -0,0 +1,14 @@ +import java.util.Date; + +CREATE TABLE QueuedCheckIn ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + answers TEXT, + checkinListId INTEGER, + datetime TEXT AS Date, + datetime_string TEXT, + event_slug TEXT, + nonce TEXT, + secret TEXT, + source_type TEXT, + type TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QueuedOrder.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QueuedOrder.sq new file mode 100644 index 00000000..8bf006c3 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QueuedOrder.sq @@ -0,0 +1,11 @@ +import kotlin.Boolean; + +CREATE TABLE QueuedOrder ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + error TEXT, + event_slug TEXT, + idempotency_key TEXT, + locked INTEGER AS Boolean DEFAULT 0, + payload TEXT, + receipt INTEGER REFERENCES Receipt (id) ON DELETE CASCADE +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Quota.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Quota.sq new file mode 100644 index 00000000..37bff20d --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Quota.sq @@ -0,0 +1,12 @@ +import kotlin.Boolean; + +CREATE TABLE Quota ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + available INTEGER AS Boolean, + available_number INTEGER, + event_slug TEXT, + json_data TEXT, + server_id INTEGER, + size INTEGER, + subevent_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QuotaItem.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QuotaItem.sq new file mode 100644 index 00000000..1f442edf --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/QuotaItem.sq @@ -0,0 +1,5 @@ +CREATE 
TABLE Quota_Item ( + QuotaId INTEGER REFERENCES Quota (id) ON DELETE CASCADE, + ItemId INTEGER REFERENCES Item (id) ON DELETE CASCADE, + PRIMARY KEY (QuotaId, ItemId) +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Receipt.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Receipt.sq new file mode 100644 index 00000000..a6a9e6f2 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Receipt.sq @@ -0,0 +1,29 @@ +import java.util.Date; +import kotlin.Boolean; + +CREATE TABLE Receipt ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + additional_text TEXT, + canceled INTEGER AS Boolean NOT NULL, + cashier_name TEXT, + cashier_numericid INTEGER, + cashier_userid TEXT, + chosen_cart_id TEXT, + closing INTEGER REFERENCES Closing (id) ON DELETE CASCADE, + currency TEXT DEFAULT 'EUR' NOT NULL, + datetime_closed TEXT AS Date, + datetime_opened TEXT AS Date, + email_to TEXT, + event_slug TEXT, + fiscalisation_data TEXT, + fiscalisation_qr TEXT, + fiscalisation_text TEXT, + open INTEGER AS Boolean DEFAULT 0, + order_code TEXT, + payment_data TEXT, + payment_type TEXT, + printed INTEGER AS Boolean DEFAULT 0 NOT NULL, + server_id INTEGER, + started INTEGER AS Boolean DEFAULT 0, + training INTEGER AS Boolean DEFAULT 0 NOT NULL +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ReceiptLine.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ReceiptLine.sq new file mode 100644 index 00000000..b2167095 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ReceiptLine.sq @@ -0,0 +1,50 @@ +import java.math.BigDecimal; +import java.util.Date; +import kotlin.Boolean; + +CREATE TABLE ReceiptLine ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + addon_to INTEGER REFERENCES ReceiptLine (id) ON DELETE CASCADE, + answers TEXT, + attendee_city TEXT, + attendee_company TEXT, + 
attendee_country TEXT, + attendee_email TEXT, + attendee_name TEXT, + attendee_street TEXT, + attendee_zipcode TEXT, + canceled INTEGER AS Boolean NOT NULL, + canceled_because_of_receipt INTEGER AS Boolean DEFAULT 0, + cart_expires TEXT AS Date, + cart_id TEXT, + cart_position_id INTEGER, + created TEXT AS Date, + custom_price_input REAL AS BigDecimal, + event_date_from TEXT, + event_date_to TEXT, + gift_card_id INTEGER, + gift_card_secret TEXT, + is_bundled INTEGER AS Boolean DEFAULT 0, + item_id INTEGER, + listed_price REAL AS BigDecimal, + positionid INTEGER, + price REAL AS BigDecimal, + price_after_voucher REAL AS BigDecimal, + price_calculated_from_net INTEGER AS Boolean DEFAULT 0, + receipt INTEGER REFERENCES Receipt (id) ON DELETE CASCADE, + remote_error TEXT, + requested_valid_from TEXT, + sale_text TEXT, + seat_guid TEXT, + seat_name TEXT, + secret TEXT, + subevent_id INTEGER, + subevent_text TEXT, + tax_rate REAL AS BigDecimal, + tax_rule INTEGER, + tax_value REAL AS BigDecimal, + type TEXT, + use_reusable_medium INTEGER, + variation_id INTEGER, + voucher_code TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ReceiptPayment.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ReceiptPayment.sq new file mode 100644 index 00000000..3ee6610d --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ReceiptPayment.sq @@ -0,0 +1,10 @@ +import java.math.BigDecimal; + +CREATE TABLE ReceiptPayment ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + amount REAL AS BigDecimal, + detailsJson TEXT, + payment_type TEXT, + receipt INTEGER REFERENCES Receipt (id) ON DELETE CASCADE, + status TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ResourceSyncStatus.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ResourceSyncStatus.sq new file mode 100644 index 00000000..b933698b --- /dev/null 
+++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ResourceSyncStatus.sq @@ -0,0 +1,8 @@ +CREATE TABLE ResourceSyncStatus ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + event_slug TEXT, + last_modified TEXT, + meta TEXT, + resource TEXT, + status TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ReusableMedium.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ReusableMedium.sq new file mode 100644 index 00000000..a7ea6cc9 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/ReusableMedium.sq @@ -0,0 +1,14 @@ +import kotlin.Boolean; + +CREATE TABLE ReusableMedium ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + active INTEGER AS Boolean NOT NULL, + customer_id INTEGER, + expires TEXT, + identifier TEXT, + json_data TEXT, + linked_giftcard_id INTEGER, + linked_orderposition_id INTEGER, + server_id INTEGER, + type TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/RevokedTicketSecret.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/RevokedTicketSecret.sq new file mode 100644 index 00000000..2dc5f980 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/RevokedTicketSecret.sq @@ -0,0 +1,8 @@ +CREATE TABLE RevokedTicketSecret ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + created TEXT, + event_slug TEXT, + json_data TEXT, + secret TEXT, + server_id integer +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Settings.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Settings.sq new file mode 100644 index 00000000..2c948de4 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/Settings.sq @@ -0,0 +1,15 @@ +import kotlin.Boolean; + +CREATE TABLE Settings ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + 
address TEXT, + city TEXT, + country TEXT, + json_data TEXT, + name TEXT, + pretixpos_additional_receipt_text TEXT, + slug TEXT, + tax_id TEXT, + vat_id TEXT, + zipcode TEXT +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/SubEvent.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/SubEvent.sq new file mode 100644 index 00000000..2c630035 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/SubEvent.sq @@ -0,0 +1,12 @@ +import java.util.Date; +import kotlin.Boolean; + +CREATE TABLE SubEvent ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + active INTEGER AS Boolean NOT NULL, + date_from TEXT AS Date, + date_to TEXT AS Date, + event_slug TEXT, + json_data TEXT, + server_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/TaxRule.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/TaxRule.sq new file mode 100644 index 00000000..36a9567d --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/TaxRule.sq @@ -0,0 +1,6 @@ +CREATE TABLE TaxRule ( + id INTEGER PRIMARY KEY AUTOINCREMENT , + event_slug TEXT, + json_data TEXT, + server_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/TicketLayout.sq b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/TicketLayout.sq new file mode 100644 index 00000000..8942f2a8 --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/TicketLayout.sq @@ -0,0 +1,10 @@ +import kotlin.Boolean; + +CREATE TABLE TicketLayout ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + background_filename TEXT, + event_slug TEXT, + is_default INTEGER AS Boolean NOT NULL, + json_data TEXT, + server_id INTEGER +); diff --git a/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/compat.sq 
b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/compat.sq new file mode 100644 index 00000000..cc4e7b8b --- /dev/null +++ b/libpretixsync/src/main/sqldelight/sqlite/eu/pretix/libpretixsync/sqldelight/compat.sq @@ -0,0 +1,125 @@ +-- Dialect-specific queries +-- Cannot be part of the table-specific .sq files, as they would generate conflicting query classes + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedOrderId: +SELECT last_insert_rowid(); + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedCheckInId: +SELECT last_insert_rowid(); + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedCheckInListId: +SELECT last_insert_rowid(); + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedOrderPositionId: +SELECT last_insert_rowid(); + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedQuestionId: +SELECT last_insert_rowid(); + +-- Switch to RETURNING once it is also supported by SQLite +getLastInsertedQuotaId: +SELECT last_insert_rowid(); + +truncateCheckIn: +DELETE FROM CheckIn; + +truncateOrderPosition: +DELETE FROM OrderPosition; + +truncateOrder: +DELETE FROM orders; + +truncateResourceSyncStatus: +DELETE FROM ResourceSyncStatus; + +truncateReusableMedium: +DELETE FROM ReusableMedium; + +truncateAllTables { + DELETE FROM BadgeLayout; + DELETE FROM BadgeLayoutItem; + DELETE FROM BlockedTicketSecret; + DELETE FROM CachedPdfImage; + DELETE FROM Cashier; + DELETE FROM CheckIn; + DELETE FROM CheckInList; + DELETE FROM CheckInList_Item; + DELETE FROM Closing; + DELETE FROM Event; + DELETE FROM Item; + DELETE FROM ItemCategory; + DELETE FROM MediumKeySet; + DELETE FROM orders; + DELETE FROM OrderPosition; + DELETE FROM Question; + DELETE FROM Question_Item; + DELETE FROM QueuedCall; + DELETE FROM QueuedCheckIn; + DELETE FROM QueuedOrder; + DELETE FROM Quota; + DELETE FROM Quota_Item; + DELETE FROM Receipt; + DELETE FROM 
ReceiptLine; + DELETE FROM ReceiptPayment; + DELETE FROM ResourceSyncStatus; + DELETE FROM ReusableMedium; + DELETE FROM RevokedTicketSecret; + DELETE FROM Settings; + DELETE FROM SubEvent; + DELETE FROM TaxRule; + DELETE FROM TicketLayout; +} + +searchOrderPosition: +SELECT OrderPosition.* +FROM OrderPosition +LEFT JOIN orders ON OrderPosition.order_ref = orders.id +LEFT JOIN Item ON OrderPosition.item = Item.id +WHERE ( + UPPER(OrderPosition.secret) LIKE :queryStartsWith + OR UPPER(OrderPosition.attendee_name) LIKE :queryContains + OR UPPER(OrderPosition.attendee_email) LIKE :queryContains + OR UPPER(orders.email) LIKE :queryContains + OR UPPER(orders.code) LIKE :queryStartsWith +) +AND ( + CASE WHEN (:use_event_filter) THEN (orders.event_slug IN :event_filter) ELSE 0 END + OR CASE WHEN (:use_event_item_filter) THEN (orders.event_slug IN :event_item_filter_events AND OrderPosition.item IN :event_item_filter_items) ELSE 0 END + OR CASE WHEN (:use_event_subevent_filter) THEN (orders.event_slug IN :event_subevent_filter_events AND OrderPosition.subevent_id IN :event_subevent_filter_subevents) ELSE 0 END + OR CASE WHEN (:use_all_filter) THEN (orders.event_slug IN :all_filter_events AND OrderPosition.item IN :all_filter_items AND OrderPosition.subevent_id IN :all_filter_subevents) ELSE 0 END +) +LIMIT :limit +OFFSET :offset; + +countOrderPositionForStatus: +-- TODO: Check if DISTINCT is required here +SELECT COUNT(DISTINCT OrderPosition.id) +FROM OrderPosition +LEFT JOIN orders ON OrderPosition.order_ref = orders.id +WHERE( + orders.event_slug = :event_slug + AND CASE WHEN (:include_pending) + THEN orders.status IN ('p', 'n') + ELSE (orders.status = 'p' OR (orders.status = 'n' AND orders.valid_if_pending = 1)) + END + AND CASE WHEN (:subevent_id > 0) THEN OrderPosition.subevent_id = :subevent_id ELSE 1 END + AND CASE WHEN (:not_all_items) THEN OrderPosition.item IN :list_item_ids ELSE 1 END + AND CASE WHEN (:only_checked_in_list_server_id > 0) THEN + (OrderPosition.id 
IN (SELECT position FROM CheckIn WHERE listId = :only_checked_in_list_server_id AND type = 'entry')) + ELSE + 1 + END + AND OrderPosition.item = :item_id + AND CASE WHEN (:variation_id > 0) THEN OrderPosition.variation_id = :variation_id ELSE 1 END +); + +lockQueuedOrder: +UPDATE QueuedOrder +SET + locked = 1 +WHERE id = ?; diff --git a/libpretixsync/src/test/java/eu/pretix/libpretixsync/check/AsyncCheckProviderTest.kt b/libpretixsync/src/test/java/eu/pretix/libpretixsync/check/AsyncCheckProviderTest.kt index 8504e24f..b20e1c3e 100644 --- a/libpretixsync/src/test/java/eu/pretix/libpretixsync/check/AsyncCheckProviderTest.kt +++ b/libpretixsync/src/test/java/eu/pretix/libpretixsync/check/AsyncCheckProviderTest.kt @@ -1,8 +1,13 @@ package eu.pretix.libpretixsync.check -import eu.pretix.libpretixsync.db.* -import eu.pretix.libpretixsync.sync.* -import eu.pretix.pretixscan.scanproxy.tests.db.BaseDatabaseTest +import eu.pretix.libpretixsync.db.Answer +import eu.pretix.libpretixsync.db.BaseDatabaseTest +import eu.pretix.libpretixsync.sync.CheckInListSyncAdapter +import eu.pretix.libpretixsync.sync.EventSyncAdapter +import eu.pretix.libpretixsync.sync.ItemSyncAdapter +import eu.pretix.libpretixsync.sync.OrderSyncAdapter +import eu.pretix.libpretixsync.sync.QuestionSyncAdapter +import eu.pretix.libpretixsync.sync.SubEventSyncAdapter import eu.pretix.pretixscan.scanproxy.tests.test.FakeConfigStore import eu.pretix.pretixscan.scanproxy.tests.test.FakeFileStorage import eu.pretix.pretixscan.scanproxy.tests.test.FakePretixApi @@ -21,44 +26,43 @@ import org.junit.Assert.assertNull class AsyncCheckProviderTest : BaseDatabaseTest() { private var configStore: FakeConfigStore? = null private var fakeApi: FakePretixApi? = null - private var item: Item? = null private var p: AsyncCheckProvider? 
= null @Before fun setUpFakes() { configStore = FakeConfigStore() fakeApi = FakePretixApi() - p = AsyncCheckProvider(configStore!!, dataStore) - - EventSyncAdapter(dataStore, "demo", "demo", fakeApi, "", null).standaloneRefreshFromJSON(jsonResource("events/event1.json")) - EventSyncAdapter(dataStore, "demo", "demo", fakeApi, "", null).standaloneRefreshFromJSON(jsonResource("events/event2.json")) - ItemSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null).standaloneRefreshFromJSON(jsonResource("items/item1.json")) - ItemSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null).standaloneRefreshFromJSON(jsonResource("items/item2.json")) - ItemSyncAdapter(dataStore, FakeFileStorage(), "demo2", fakeApi, "", null).standaloneRefreshFromJSON(jsonResource("items/event2-item3.json")) - CheckInListSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null, 0).standaloneRefreshFromJSON( + p = AsyncCheckProvider(configStore!!, db) + + EventSyncAdapter(db, "demo", "demo", fakeApi!!, "", null).standaloneRefreshFromJSON(jsonResource("events/event1.json")) + EventSyncAdapter(db, "demo2", "demo2", fakeApi!!, "", null).standaloneRefreshFromJSON(jsonResource("events/event2.json")) + ItemSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null).standaloneRefreshFromJSON(jsonResource("items/item1.json")) + ItemSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null).standaloneRefreshFromJSON(jsonResource("items/item2.json")) + ItemSyncAdapter(db, FakeFileStorage(), "demo2", fakeApi!!, "", null).standaloneRefreshFromJSON(jsonResource("items/event2-item3.json")) + CheckInListSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null, 0).standaloneRefreshFromJSON( jsonResource("checkinlists/list1.json") ) - CheckInListSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null, 0).standaloneRefreshFromJSON( + CheckInListSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null, 0).standaloneRefreshFromJSON( 
jsonResource("checkinlists/list2.json") ) - CheckInListSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null, 0).standaloneRefreshFromJSON( + CheckInListSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null, 0).standaloneRefreshFromJSON( jsonResource("checkinlists/list3.json") ) - CheckInListSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null, 0).standaloneRefreshFromJSON( + CheckInListSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null, 0).standaloneRefreshFromJSON( jsonResource("checkinlists/list4.json") ) - CheckInListSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null, 0).standaloneRefreshFromJSON( + CheckInListSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null, 0).standaloneRefreshFromJSON( jsonResource("checkinlists/list5.json") ) - CheckInListSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null, 0).standaloneRefreshFromJSON( + CheckInListSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null, 0).standaloneRefreshFromJSON( jsonResource("checkinlists/list6.json") ) - CheckInListSyncAdapter(dataStore, FakeFileStorage(), "demo2", fakeApi, "", null, 0).standaloneRefreshFromJSON( + CheckInListSyncAdapter(db, FakeFileStorage(), "demo2", fakeApi!!, "", null, 0).standaloneRefreshFromJSON( jsonResource("checkinlists/event2-list7.json") ) - SubEventSyncAdapter(dataStore, "demo", "14", fakeApi, "", null).standaloneRefreshFromJSON(jsonResource("subevents/subevent1.json")) + SubEventSyncAdapter(db, "demo", "14", fakeApi!!, "", null).standaloneRefreshFromJSON(jsonResource("subevents/subevent1.json")) - val osa = OrderSyncAdapter(dataStore, FakeFileStorage(), "demo", 0, true, false, fakeApi, "", null) + val osa = OrderSyncAdapter(db, FakeFileStorage(), "demo", 0, true, false, fakeApi!!, "", null) osa.standaloneRefreshFromJSON(jsonResource("orders/order1.json")) osa.standaloneRefreshFromJSON(jsonResource("orders/order2.json")) 
osa.standaloneRefreshFromJSON(jsonResource("orders/order3.json")) @@ -68,7 +72,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { osa.standaloneRefreshFromJSON(jsonResource("orders/order7.json")) osa.standaloneRefreshFromJSON(jsonResource("orders/order8.json")) osa.standaloneRefreshFromJSON(jsonResource("orders/order9.json")) - val osa2 = OrderSyncAdapter(dataStore, FakeFileStorage(), "demo2", 0, true, false, fakeApi, "", null) + val osa2 = OrderSyncAdapter(db, FakeFileStorage(), "demo2", 0, true, false, fakeApi!!, "", null) osa2.standaloneRefreshFromJSON(jsonResource("orders/event2-order1.json")) } @@ -81,9 +85,9 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { assertEquals("Casey Flores", r.attendee_name) assertEquals(true, r.isRequireAttention) - val qciList = dataStore.select(QueuedCheckIn::class.java).get().toList() + val qciList = db.queuedCheckInQueries.selectAll().executeAsList() assertEquals(1, qciList.size.toLong()) - assertEquals("kfndgffgyw4tdgcacx6bb3bgemq69cxj", qciList[0].getSecret()) + assertEquals("kfndgffgyw4tdgcacx6bb3bgemq69cxj", qciList[0].secret) } @Test @@ -147,7 +151,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testSimpleValidUntil() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2023-03-04T00:00:01.000Z")) var r = p2.check(mapOf("demo" to 1L), "dz4OBvVsTDSJ6T1nY1dD") @@ -160,7 +164,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testSimpleValidFrom() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2023-03-03T23:59:59.000Z")) var r = p2.check(mapOf("demo" to 1L), "uG3H4hgRYEIrw4YNclyH") @@ -238,8 +242,9 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) r = p!!.check(mapOf("demo" to 2L), 
"kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 2) - assertEquals(dataStore.count(CheckIn::class.java).join(OrderPosition::class.java).on(OrderPosition.ID.eq(CheckIn.POSITION_ID)).where(OrderPosition.SECRET.eq("kfndgffgyw4tdgcacx6bb3bgemq69cxj")).get().value(), 3) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 2L) + assertEquals(db.checkInQueries.testCountByOrderPositionSecret("kfndgffgyw4tdgcacx6bb3bgemq69cxj").executeAsOne(), 3L) + } @Test @@ -250,8 +255,8 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) r = p!!.check(mapOf("demo" to 1L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj", "barcode", null, false, false, TicketCheckProvider.CheckInType.EXIT) assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 3) - assertEquals(dataStore.select(QueuedCheckIn::class.java).get().toList().last().getType(), "exit") + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 3L) + assertEquals(db.queuedCheckInQueries.selectAll().executeAsList().last().type, "exit") } @Test @@ -265,7 +270,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) r = p!!.check(mapOf("demo" to 1L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj", "barcode", null, false, false, TicketCheckProvider.CheckInType.ENTRY) assertEquals(TicketCheckProvider.CheckResult.Type.USED, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 3) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 3L) } @Test @@ -276,36 +281,38 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) r = p!!.check(mapOf("demo" to 3L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj", "barcode", null, false, 
false, TicketCheckProvider.CheckInType.ENTRY) assertEquals(TicketCheckProvider.CheckResult.Type.USED, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 2) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 2L) } @Test fun testAddonMatchDisabled() { val r = p!!.check(mapOf("demo" to 5L), "XwBltvZO50PKtygKtlIHgAFAxmhtDlzK") assertEquals(TicketCheckProvider.CheckResult.Type.PRODUCT, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 0) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 0L) } @Test fun testAddonMatchValid() { val r = p!!.check(mapOf("demo" to 3L), "XwBltvZO50PKtygKtlIHgAFAxmhtDlzK") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 1) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 1L) } @Test fun testAddonMatchAmbiguous() { val r = p!!.check(mapOf("demo" to 4L), "XwBltvZO50PKtygKtlIHgAFAxmhtDlzK") assertEquals(TicketCheckProvider.CheckResult.Type.AMBIGUOUS, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 0) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 0L) } private fun setRuleOnList2(r: String) { - val cl = dataStore.select(CheckInList::class.java).where(CheckInList.SERVER_ID.eq(2)).get().first() - val j = cl.json + val cl = db.checkInListQueries.selectByServerId(2L).executeAsOne() + val j = JSONObject(cl.json_data) j.put("rules", JSONObject(r)) - cl.setJson_data(j.toString()) - dataStore.update(cl) + db.checkInListQueries.testUpdateJsonData( + json_data = j.toString(), + id = cl.id, + ) } @Test @@ -324,27 +331,27 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesProduct() { setRuleOnList2("{\n" + - " \"inList\": [\n" + - " {\"var\": \"product\"}, {\n" + - " \"objectList\": [\n" + - " {\"lookup\": [\"product\", \"2\", \"Ticket\"]}\n" + - " ]\n" + - " }\n" + - " ]\n" + - " }") + " \"inList\": 
[\n" + + " {\"var\": \"product\"}, {\n" + + " \"objectList\": [\n" + + " {\"lookup\": [\"product\", \"2\", \"Ticket\"]}\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }") var r = p!!.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.RULES, r.type) setRuleOnList2("{\n" + - " \"inList\": [\n" + - " {\"var\": \"product\"}, {\n" + - " \"objectList\": [\n" + - " {\"lookup\": [\"product\", \"1\", \"Ticket\"]},\n" + - " {\"lookup\": [\"product\", \"2\", \"Ticket\"]}\n" + - " ]\n" + - " }\n" + - " ]\n" + - " }") + " \"inList\": [\n" + + " {\"var\": \"product\"}, {\n" + + " \"objectList\": [\n" + + " {\"lookup\": [\"product\", \"1\", \"Ticket\"]},\n" + + " {\"lookup\": [\"product\", \"2\", \"Ticket\"]}\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }") r = p!!.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) } @@ -352,27 +359,27 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesVariation() { setRuleOnList2("{\n" + - " \"inList\": [\n" + - " {\"var\": \"variation\"}, {\n" + - " \"objectList\": [\n" + - " {\"lookup\": [\"variation\", \"3\", \"Ticket\"]}\n" + - " ]\n" + - " }\n" + - " ]\n" + - " }") + " \"inList\": [\n" + + " {\"var\": \"variation\"}, {\n" + + " \"objectList\": [\n" + + " {\"lookup\": [\"variation\", \"3\", \"Ticket\"]}\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }") var r = p!!.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.RULES, r.type) setRuleOnList2("{\n" + - " \"inList\": [\n" + - " {\"var\": \"variation\"}, {\n" + - " \"objectList\": [\n" + - " {\"lookup\": [\"variation\", \"3\", \"Ticket\"]},\n" + - " {\"lookup\": [\"variation\", \"2\", \"Ticket\"]}\n" + - " ]\n" + - " }\n" + - " ]\n" + - " }") + " \"inList\": [\n" + + " {\"var\": \"variation\"}, {\n" + + " \"objectList\": [\n" + + " {\"lookup\": [\"variation\", \"3\", \"Ticket\"]},\n" 
+ + " {\"lookup\": [\"variation\", \"2\", \"Ticket\"]}\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }") r = p!!.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) } @@ -380,14 +387,14 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesGate() { setRuleOnList2("{\n" + - " \"inList\": [\n" + - " {\"var\": \"gate\"}, {\n" + - " \"objectList\": [\n" + - " {\"lookup\": [\"gate\", \"1\", \"Gate 1\"]},\n" + - " ]\n" + - " }\n" + - " ]\n" + - " }") + " \"inList\": [\n" + + " {\"var\": \"gate\"}, {\n" + + " \"objectList\": [\n" + + " {\"lookup\": [\"gate\", \"1\", \"Gate 1\"]},\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }") configStore!!.deviceKnownGateID = 0 var r = p!!.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.RULES, r.type) @@ -400,15 +407,15 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { assertEquals(TicketCheckProvider.CheckResult.Type.RULES, r.type) setRuleOnList2("{\n" + - " \"inList\": [\n" + - " {\"var\": \"gate\"}, {\n" + - " \"objectList\": [\n" + - " {\"lookup\": [\"gate\", \"1\", \"Gate 1\"]},\n" + - " {\"lookup\": [\"gate\", \"2\", \"Gate 2\"]},\n" + - " ]\n" + - " }\n" + - " ]\n" + - " }") + " \"inList\": [\n" + + " {\"var\": \"gate\"}, {\n" + + " \"objectList\": [\n" + + " {\"lookup\": [\"gate\", \"1\", \"Gate 1\"]},\n" + + " {\"lookup\": [\"gate\", \"2\", \"Gate 2\"]},\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }") r = p!!.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) r = p!!.check(mapOf("demo" to 2L), "E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA") @@ -433,11 +440,11 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesEntryStatus() { setRuleOnList2("{\n" + - " \"or\": [\n" + - " {\"==\": [{\"var\": \"entry_status\"}, 
\"absent\"]},\n" + - " {\"<\": [{\"var\": \"entries_number\"}, 1]}\n" + - " ]\n" + - " }") + " \"or\": [\n" + + " {\"==\": [{\"var\": \"entry_status\"}, \"absent\"]},\n" + + " {\"<\": [{\"var\": \"entries_number\"}, 1]}\n" + + " ]\n" + + " }") var r = p!!.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) r = p!!.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") @@ -450,7 +457,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesEntriesToday() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2020-01-01T10:00:00.000Z")) setRuleOnList2("{\"<\": [{\"var\": \"entries_today\"}, 3]}") @@ -484,7 +491,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesEntriesDays() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid unlimited times, but only on two arbitrary days setRuleOnList2("{\"or\": [{\">\": [{\"var\": \"entries_today\"}, 0]}, {\"<\": [{\"var\": \"entries_days\"}, 2]}]}") @@ -520,18 +527,18 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesEntriesSince() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid once before X and once after X setRuleOnList2("{\n" + - " \"or\": [\n" + - " {\"<=\": [{\"var\": \"entries_number\"}, 0]},\n" + - " {\"and\": [\n" + - " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + - " {\"<=\": [{\"entries_since\": [{\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}]}, 0]},\n" + - " ]},\n" + - " ],\n" + - " }") + " \"or\": [\n" + + " {\"<=\": [{\"var\": \"entries_number\"}, 0]},\n" + + " {\"and\": [\n" + + " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": 
[\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + + " {\"<=\": [{\"entries_since\": [{\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}]}, 0]},\n" + + " ]},\n" + + " ],\n" + + " }") p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2020-01-01T21:00:00.000Z")) @@ -552,18 +559,18 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesEntriesSinceTimeOfDay() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid once before X and once after X setRuleOnList2("{\n" + - " \"or\": [\n" + - " {\"<=\": [{\"var\": \"entries_today\"}, 0]},\n" + - " {\"and\": [\n" + - " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"customtime\", \"23:00:00\"]}, 0]},\n" + - " {\"<=\": [{\"entries_since\": [{\"buildTime\": [\"customtime\", \"23:00:00\"]}]}, 0]},\n" + - " ]},\n" + - " ],\n" + - " }") + " \"or\": [\n" + + " {\"<=\": [{\"var\": \"entries_today\"}, 0]},\n" + + " {\"and\": [\n" + + " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"customtime\", \"23:00:00\"]}, 0]},\n" + + " {\"<=\": [{\"entries_since\": [{\"buildTime\": [\"customtime\", \"23:00:00\"]}]}, 0]},\n" + + " ]},\n" + + " ],\n" + + " }") val times = listOf( "2020-01-01T22:00:00.000+09:00", @@ -585,18 +592,18 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesEntriesBefore() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid after 23:00 only if people already showed up before setRuleOnList2("{\n" + - " \"or\": [\n" + - " {\"isBefore\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + - " {\"and\": [\n" + - " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + - " {\">=\": [{\"entries_before\": [{\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}]}, 1]},\n" + - " ]},\n" + - " ],\n" + 
- " }") + " \"or\": [\n" + + " {\"isBefore\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + + " {\"and\": [\n" + + " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + + " {\">=\": [{\"entries_before\": [{\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}]}, 1]},\n" + + " ]},\n" + + " ],\n" + + " }") p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2020-01-01T21:00:00.000Z")) @@ -608,7 +615,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { r = p2.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) - dataStore.delete(CheckIn::class.java).get().value() + db.compatQueries.truncateCheckIn() r = p2.check(mapOf("demo" to 2L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj") assertEquals(TicketCheckProvider.CheckResult.Type.RULES, r.type) @@ -616,21 +623,21 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesEntriesDaysSince() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid once before X and on one day after X setRuleOnList2("{" + - " \"or\": [\n" + - " {\"<=\": [{\"var\": \"entries_number\"}, 0]},\n" + - " {\"and\": [\n" + - " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + - " {\"or\": [\n" + - " {\">\": [{\"var\": \"entries_today\"}, 0]},\n" + - " {\"<=\": [{\"entries_days_since\": [{\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}]}, 0]},\n" + - " ]}\n" + - " ]},\n" + - " ],\n" + - " }") + " \"or\": [\n" + + " {\"<=\": [{\"var\": \"entries_number\"}, 0]},\n" + + " {\"and\": [\n" + + " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + + " {\"or\": [\n" + + " {\">\": [{\"var\": \"entries_today\"}, 0]},\n" + + " {\"<=\": 
[{\"entries_days_since\": [{\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}]}, 0]},\n" + + " ]}\n" + + " ]},\n" + + " ],\n" + + " }") p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2020-01-01T21:00:00.000Z")) @@ -656,18 +663,18 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesEntriesDaysBefore() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid after 23:00 only if people already showed up on two days before setRuleOnList2("{" + - " \"or\": [\n" + - " {\"isBefore\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + - " {\"and\": [\n" + - " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + - " {\">=\": [{\"entries_days_before\": [{\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}]}, 2]},\n" + - " ]},\n" + - " ],\n" + - " }") + " \"or\": [\n" + + " {\"isBefore\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + + " {\"and\": [\n" + + " {\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}, 0]},\n" + + " {\">=\": [{\"entries_days_before\": [{\"buildTime\": [\"custom\", \"2020-01-01T23:00:00.000+01:00\"]}]}, 2]},\n" + + " ]},\n" + + " ],\n" + + " }") p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2019-12-30T21:00:00.000Z")) @@ -692,7 +699,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesMinutesSinceLastEntry() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) setRuleOnList2("{\"or\": [{\"<=\": [{\"var\": \"minutes_since_last_entry\"}, -1]}, {\">\": [{\"var\": \"minutes_since_last_entry\"}, 180]}]}") p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2020-01-01T10:00:00.000Z")) @@ -718,7 +725,7 @@ class AsyncCheckProviderTest : 
BaseDatabaseTest() { @Test fun testRulesMinutesSinceFirstEntry() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) setRuleOnList2("{\"or\": [{\"<=\": [{\"var\": \"minutes_since_first_entry\"}, -1]}, {\"<\": [{\"var\": \"minutes_since_first_entry\"}, 180]}]}") p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2020-01-01T10:00:00.000Z")) @@ -736,7 +743,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesIsAfterTolerance() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid unlimited times, but only on two arbitrary days setRuleOnList2("{\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"date_admission\"]}, 10]}") @@ -756,7 +763,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesIsAfterSubevent() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid unlimited times, but only on two arbitrary days setRuleOnList2("{\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"date_admission\"]}, 10]}") @@ -776,7 +783,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesIsAfterNoTolerance() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid unlimited times, but only on two arbitrary days setRuleOnList2("{\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"date_admission\"]}, null]}") @@ -792,7 +799,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesIsBeforeTolerance() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid unlimited times, but only on two arbitrary days setRuleOnList2("{\"isBefore\": [{\"var\": \"now\"}, {\"buildTime\": [\"date_to\"]}, 10]}") @@ -808,7 +815,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test 
fun testRulesIsBeforeNoTolerance() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid unlimited times, but only on two arbitrary days setRuleOnList2("{\"isBefore\": [{\"var\": \"now\"}, {\"buildTime\": [\"date_to\"]}]}") @@ -824,7 +831,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesIsAfterCustomDateTime() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid unlimited times, but only on two arbitrary days setRuleOnList2("{\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"custom\", \"2020-01-01T22:00:00.000Z\"]}]}") @@ -840,7 +847,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesIsAfterCustomTime() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid unlimited times, but only on two arbitrary days setRuleOnList2("{\"isAfter\": [{\"var\": \"now\"}, {\"buildTime\": [\"customtime\", \"14:00\"]}]}") @@ -856,7 +863,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testRulesCompareIsoweekday() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) // Ticket is valid unlimited times, but only on two arbitrary days setRuleOnList2("{\">=\": [{\"var\": \"now_isoweekday\"}, 6]}") @@ -872,7 +879,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testQuestionsForOtherItem() { - QuestionSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null).standaloneRefreshFromJSON( + QuestionSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null).standaloneRefreshFromJSON( jsonResource("questions/question1.json") ) @@ -882,7 +889,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testQuestionNotDuringCheckin() { - QuestionSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", 
null).standaloneRefreshFromJSON( + QuestionSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null).standaloneRefreshFromJSON( jsonResource("questions/question3.json") ) @@ -892,7 +899,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testQuestionsFilled() { - QuestionSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null).standaloneRefreshFromJSON( + QuestionSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null).standaloneRefreshFromJSON( jsonResource("questions/question1.json") ) @@ -902,7 +909,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testQuestionsIgnored() { - QuestionSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null).standaloneRefreshFromJSON( + QuestionSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null).standaloneRefreshFromJSON( jsonResource("questions/question1.json") ) @@ -912,7 +919,7 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { @Test fun testQuestionsRequired() { - QuestionSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null).standaloneRefreshFromJSON( + QuestionSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null).standaloneRefreshFromJSON( jsonResource("questions/question1.json") ) @@ -920,23 +927,23 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { assertEquals(TicketCheckProvider.CheckResult.Type.ANSWERS_REQUIRED, r.type) assertEquals(1, r.requiredAnswers?.size) val ra = r.requiredAnswers!![0] - assertEquals(1, ra.question.getServer_id()) + assertEquals(1, ra.question.server_id) val answers = ArrayList() - answers.add(Answer(ra.question, "True")) + answers.add(Answer(ra.question.toModel(), "True")) r = p!!.check(mapOf("demo" to 1L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj", "barcode", answers, false, false, TicketCheckProvider.CheckInType.ENTRY) assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) - val qciList = dataStore.select(QueuedCheckIn::class.java).get().toList() + val qciList = 
db.queuedCheckInQueries.selectAll().executeAsList() assertEquals(1, qciList.size.toLong()) - assertEquals("kfndgffgyw4tdgcacx6bb3bgemq69cxj", qciList[0].getSecret()) - assertEquals("[{\"answer\":\"True\",\"question\":1}]", qciList[0].getAnswers()) + assertEquals("kfndgffgyw4tdgcacx6bb3bgemq69cxj", qciList[0].secret) + assertEquals("[{\"answer\":\"True\",\"question\":1}]", qciList[0].answers) } @Test fun testQuestionsInvalidInput() { - QuestionSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null).standaloneRefreshFromJSON( + QuestionSyncAdapter(db, FakeFileStorage(), "demo", fakeApi!!, "", null).standaloneRefreshFromJSON( jsonResource("questions/question2.json") ) @@ -946,12 +953,12 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { val ra = r.requiredAnswers!![0] val answers = ArrayList() - answers.add(Answer(ra.question, "True")) + answers.add(Answer(ra.question.toModel(), "True")) r = p!!.check(mapOf("demo" to 1L), "kfndgffgyw4tdgcacx6bb3bgemq69cxj", "barcode", answers, false, false, TicketCheckProvider.CheckInType.ENTRY) assertEquals(TicketCheckProvider.CheckResult.Type.ANSWERS_REQUIRED, r.type) - val qciList = dataStore.select(QueuedCheckIn::class.java).get().toList() + val qciList = db.queuedCheckInQueries.selectAll().executeAsList() assertEquals(0, qciList.size.toLong()) } @@ -1021,12 +1028,12 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { fun testSignedAndValid() { val r = p!!.check(mapOf("demo" to 1L), "E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 1) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 1L) } @Test fun testSignedAndNotYetValid() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) 
p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2023-02-03T22:59:59.000Z")) val r = p2.check(mapOf("demo" to 1L), "Ok4EsqDRCr2cL6yDRtqeP7j5Usr1Vj1Db7J0izOuRGx6Qn0BS1ISW2nxlW8PXkYRk7PJhIBmsK1V1ucq5obBoBAMG4p9jCPKBAheRdFV0REVDZUCKAEAVAQA") @@ -1036,12 +1043,12 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { val r2 = p2.check(mapOf("demo" to 1L), "Ok4EsqDRCr2cL6yDRtqeP7j5Usr1Vj1Db7J0izOuRGx6Qn0BS1ISW2nxlW8PXkYRk7PJhIBmsK1V1ucq5obBoBAMG4p9jCPKBAheRdFV0REVDZUCKAEAVAQA") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r2.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 1) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 1L) } @Test fun testSignedAndNotLongerValid() { - val p2 = AsyncCheckProvider(configStore!!, dataStore) + val p2 = AsyncCheckProvider(configStore!!, db) p2.setNow(ISODateTimeFormat.dateTime().parseDateTime("2023-02-03T11:01:01.000Z")) val r = p2.check(mapOf("demo" to 1L), "EU9dJn3k5jzwfY4JQAKrTOVFmo+BvZKwH6UAIFOz3XTxABa7tmjU5UoLD8hJr3440uY7IFEHzau1DVk0sP994bgnzLNswAAKBARdUdGMmNVSHVUCKAEAVAQA") @@ -1051,50 +1058,52 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { val r2 = p2.check(mapOf("demo" to 1L), "EU9dJn3k5jzwfY4JQAKrTOVFmo+BvZKwH6UAIFOz3XTxABa7tmjU5UoLD8hJr3440uY7IFEHzau1DVk0sP994bgnzLNswAAKBARdUdGMmNVSHVUCKAEAVAQA") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r2.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 1) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 1L) } @Test fun testSignedAndRevoked() { - val rev = RevokedTicketSecret() - rev.setEvent_slug(configStore!!.eventSlug) - rev.setCreated("2020-10-19T10:00:00+00:00") - rev.setSecret("E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA") - rev.setJson_data("{}") - dataStore.insert(rev) + db.revokedTicketSecretQueries.insert( + created = "2020-10-19T10:00:00+00:00", + event_slug = configStore!!.eventSlug, + 
json_data = "{}", + secret = "E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA", + server_id = 1L, + ) val r = p!!.check(mapOf("demo" to 1L), "E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA") assertEquals(TicketCheckProvider.CheckResult.Type.REVOKED, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 0) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 0L) } @Test fun testSignedAndBlocked() { - val rev = BlockedTicketSecret() - rev.setEvent_slug(configStore!!.eventSlug) - rev.setUpdated("2020-10-19T10:00:00+00:00") - rev.setSecret("E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA") - rev.isBlocked = true - rev.setJson_data("{}") - dataStore.insert(rev) + db.blockedTicketSecretQueries.insert( + blocked = true, + event_slug = configStore!!.eventSlug, + json_data = "{}", + secret = "E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA", + updated = "2020-10-19T10:00:00+00:00", + server_id = 1L, + ) val r = p!!.check(mapOf("demo" to 1L), "E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA") assertEquals(TicketCheckProvider.CheckResult.Type.BLOCKED, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 0) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 0L) } @Test fun testSignedUnknownProduct() { val r = p!!.check(mapOf("demo" to 1L), "OUmw2Ro3YOMQ4ktAlAIsDVe4Xsr1KXla/0SZVN34qIZWtUX0hx1DXDHxaCatGTNzOeCMjHQABR5E6ESCOOx1g7AIkBhVkdDdJJTVSZWCKAEAPAQA") assertEquals(TicketCheckProvider.CheckResult.Type.ERROR, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 0) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 0L) } @Test fun 
testSignedInvalidSignature() { val r = p!!.check(mapOf("demo" to 1L), "EFAKEyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA") assertEquals(TicketCheckProvider.CheckResult.Type.INVALID, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 0) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 0L) } @Test @@ -1138,9 +1147,9 @@ class AsyncCheckProviderTest : BaseDatabaseTest() { fun testSignedMultipleLists() { var r = p!!.check(mapOf("demo2" to 7L), "E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA") assertEquals(TicketCheckProvider.CheckResult.Type.INVALID, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 0) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 0L) r = p!!.check(mapOf("demo2" to 7L, "demo" to 1L), "E4BibyTSylQOgeKjuMPiTDxi5HXPuTVsx1qCli3IL0143gj0EZXOB9iQInANxRFJTt4Pf9nXnHdB91Qk/RN0L5AIBABSxw2TKFnSUNUCKAEAPAQA") assertEquals(TicketCheckProvider.CheckResult.Type.VALID, r.type) - assertEquals(dataStore.count(QueuedCheckIn::class.java).get().value(), 1) + assertEquals(db.queuedCheckInQueries.count().executeAsOne(), 1L) } } diff --git a/libpretixsync/src/test/java/eu/pretix/libpretixsync/db/BaseDatabaseTest.java b/libpretixsync/src/test/java/eu/pretix/libpretixsync/db/BaseDatabaseTest.java new file mode 100644 index 00000000..5334886a --- /dev/null +++ b/libpretixsync/src/test/java/eu/pretix/libpretixsync/db/BaseDatabaseTest.java @@ -0,0 +1,139 @@ +package eu.pretix.libpretixsync.db; + +import app.cash.sqldelight.driver.jdbc.sqlite.JdbcSqliteDriver; +import eu.pretix.libpretixsync.db.*; +import eu.pretix.libpretixsync.Models; +import eu.pretix.libpretixsync.sqldelight.BigDecimalAdapter; +import eu.pretix.libpretixsync.sqldelight.CheckIn; +import eu.pretix.libpretixsync.sqldelight.Closing; +import eu.pretix.libpretixsync.sqldelight.Event; +import 
eu.pretix.libpretixsync.sqldelight.JavaUtilDateAdapter; +import eu.pretix.libpretixsync.sqldelight.QueuedCheckIn; +import eu.pretix.libpretixsync.sqldelight.Receipt; +import eu.pretix.libpretixsync.sqldelight.ReceiptLine; +import eu.pretix.libpretixsync.sqldelight.ReceiptPayment; +import eu.pretix.libpretixsync.sqldelight.SubEvent; +import eu.pretix.libpretixsync.sqldelight.SyncDatabase; +import io.requery.Persistable; +import io.requery.cache.EntityCacheBuilder; +import io.requery.sql.Configuration; +import io.requery.sql.ConfigurationBuilder; +import io.requery.sql.EntityDataStore; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.rules.TestName; +import org.sqlite.SQLiteConfig; +import org.sqlite.SQLiteDataSource; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Formatter; +import java.util.Properties; +import java.util.Random; + + +public abstract class BaseDatabaseTest { + + @Rule + public TestName name = new TestName(); + + protected EntityDataStore dataStore; + private Connection connection; + + protected SyncDatabase db; + + private static String byteArray2Hex(final byte[] hash) { + Formatter formatter = new Formatter(); + for (byte b : hash) { + formatter.format("%02x", b); + } + return formatter.toString(); + } + + @Before + public void setUpDataStore() throws SQLException, NoSuchAlgorithmException { + byte[] randomBytes = new byte[32]; // length is bounded by 7 + new Random().nextBytes(randomBytes); + MessageDigest md = MessageDigest.getInstance("SHA-1"); + md.update(name.getMethodName().getBytes()); + md.update(randomBytes); + String dbname = byteArray2Hex(md.digest()); + String sourceUrl = "jdbc:sqlite:file:" + dbname + "?mode=memory&cache=shared"; + + SQLiteDataSource dataSource = new SQLiteDataSource(); + dataSource.setUrl(sourceUrl); + SQLiteConfig config = new SQLiteConfig(); + 
config.setDateClass("TEXT"); + dataSource.setConfig(config); + dataSource.setEnforceForeignKeys(true); + Migrations.migrate(dataSource, true); + connection = dataSource.getConnection(); + + Configuration configuration = new ConfigurationBuilder(dataSource, Models.DEFAULT) + .useDefaultLogging() + .setEntityCache(new EntityCacheBuilder(Models.DEFAULT) + .useReferenceCache(false) + .useSerializableCache(false) + .build()) + .build(); + dataStore = new EntityDataStore<>(configuration); + + setUpDb(sourceUrl); + } + + private void setUpDb(String sourceUrl) { + JdbcSqliteDriver driver = new JdbcSqliteDriver(sourceUrl, new Properties()); + JavaUtilDateAdapter dateAdapter = new JavaUtilDateAdapter(); + BigDecimalAdapter bigDecimalAdapter = new BigDecimalAdapter(); + + db = SyncDatabase.Companion.invoke( + driver, + new CheckIn.Adapter( + dateAdapter + ), + new Closing.Adapter( + bigDecimalAdapter, + dateAdapter, + bigDecimalAdapter, + bigDecimalAdapter + ), + new Event.Adapter( + dateAdapter, + dateAdapter + ), + new QueuedCheckIn.Adapter( + dateAdapter + ), + new Receipt.Adapter( + dateAdapter, + dateAdapter + ), + new ReceiptLine.Adapter( + dateAdapter, + dateAdapter, + bigDecimalAdapter, + bigDecimalAdapter, + bigDecimalAdapter, + bigDecimalAdapter, + bigDecimalAdapter, + bigDecimalAdapter + ), + new ReceiptPayment.Adapter( + bigDecimalAdapter + ), + new SubEvent.Adapter( + dateAdapter, + dateAdapter + ) + ); + } + + @After + public void tearDownDataStore() throws Exception { + dataStore.close(); + connection.close(); + } +} diff --git a/libpretixsync/src/test/java/eu/pretix/libpretixsync/sync/OrderSyncTest.kt b/libpretixsync/src/test/java/eu/pretix/libpretixsync/sync/OrderSyncTest.kt index 5547a61e..2df37fe0 100644 --- a/libpretixsync/src/test/java/eu/pretix/libpretixsync/sync/OrderSyncTest.kt +++ b/libpretixsync/src/test/java/eu/pretix/libpretixsync/sync/OrderSyncTest.kt @@ -1,9 +1,7 @@ package eu.pretix.libpretixsync.sync import 
eu.pretix.libpretixsync.api.ApiException -import eu.pretix.libpretixsync.db.* -import eu.pretix.libpretixsync.sync.* -import eu.pretix.pretixscan.scanproxy.tests.db.BaseDatabaseTest +import eu.pretix.libpretixsync.db.BaseDatabaseTest import eu.pretix.pretixscan.scanproxy.tests.test.FakeConfigStore import eu.pretix.pretixscan.scanproxy.tests.test.FakeFileStorage import eu.pretix.pretixscan.scanproxy.tests.test.FakePretixApi @@ -27,9 +25,9 @@ class OrderSyncTest : BaseDatabaseTest() { fun setUpFakes() { configStore = FakeConfigStore() fakeApi = FakePretixApi() - osa = OrderSyncAdapter(dataStore, FakeFileStorage(), "demo", 0, true, false, fakeApi, "", null) + osa = OrderSyncAdapter(db, FakeFileStorage(), "demo", 0, true, false, fakeApi, "", null) - CheckInListSyncAdapter(dataStore, FakeFileStorage(), "demo", fakeApi, "", null, 0).standaloneRefreshFromJSON( + CheckInListSyncAdapter(db, FakeFileStorage(), "demo", fakeApi, "", null, 0).standaloneRefreshFromJSON( jsonResource("checkinlists/list1.json") ) } @@ -62,13 +60,13 @@ class OrderSyncTest : BaseDatabaseTest() { osa.download() assertEquals("http://1.1.1.1/api/v1/organizers/demo/events/demo/orders/?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true", fakeApi.lastRequestUrl) - assertEquals(2, dataStore.count(Order::class.java).get().value()) - assertEquals(5, dataStore.count(OrderPosition::class.java).get().value()) - assertEquals(3, dataStore.count(CheckIn::class.java).get().value()) - val rlm = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm.getEvent_slug(), "demo") - assertEquals(rlm.getLast_modified(), "timestamp1") - assertEquals(rlm.getStatus(), "complete") + assertEquals(2L, db.orderQueries.count().executeAsOne()) + assertEquals(5L, db.orderPositionQueries.count().executeAsOne()) + assertEquals(3L, 
db.checkInQueries.count().executeAsOne()) + val rlm = db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm.event_slug, "demo") + assertEquals(rlm.last_modified, "timestamp1") + assertEquals(rlm.status, "complete") } @Test @@ -96,14 +94,14 @@ class OrderSyncTest : BaseDatabaseTest() { osa.download() assertEquals("%page2?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true", fakeApi.lastRequestUrl) - assertEquals(2, dataStore.count(Order::class.java).get().value()) - assertEquals(5, dataStore.count(OrderPosition::class.java).get().value()) - assertEquals(3, dataStore.count(CheckIn::class.java).get().value()) + assertEquals(2L, db.orderQueries.count().executeAsOne()) + assertEquals(5L, db.orderPositionQueries.count().executeAsOne()) + assertEquals(3L, db.checkInQueries.count().executeAsOne()) - val rlm = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm.getEvent_slug(), "demo") - assertEquals(rlm.getLast_modified(), "timestamp1") - assertEquals(rlm.getStatus(), "complete") + val rlm = db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm.event_slug, "demo") + assertEquals(rlm.last_modified, "timestamp1") + assertEquals(rlm.status, "complete") } @Test @@ -137,14 +135,14 @@ class OrderSyncTest : BaseDatabaseTest() { assertEquals("%page3?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true", fakeApi.lastRequestUrl) - assertEquals(2, dataStore.count(Order::class.java).get().value()) - assertEquals(5, dataStore.count(OrderPosition::class.java).get().value()) - assertEquals(3, dataStore.count(CheckIn::class.java).get().value()) + assertEquals(2L, 
db.orderQueries.count().executeAsOne()) + assertEquals(5L, db.orderPositionQueries.count().executeAsOne()) + assertEquals(3L, db.checkInQueries.count().executeAsOne()) - val rlm = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm.getEvent_slug(), "demo") - assertEquals(rlm.getLast_modified(), "timestamp1") - assertEquals(rlm.getStatus(), "incomplete:2019-01-01T00:11:30Z") + val rlm = db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm.event_slug, "demo") + assertEquals(rlm.last_modified, "timestamp1") + assertEquals(rlm.status, "incomplete:2019-01-01T00:11:30Z") fakeApi.fetchResponses.add { val respdata = JSONObject() @@ -160,12 +158,12 @@ class OrderSyncTest : BaseDatabaseTest() { osa.download() assertEquals("http://1.1.1.1/api/v1/organizers/demo/events/demo/orders/?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true&ordering=datetime&created_since=2019-01-01T00%3A11%3A30Z", fakeApi.lastRequestUrl) - assertEquals(4, dataStore.count(Order::class.java).get().value()) + assertEquals(4L, db.orderQueries.count().executeAsOne()) - val rlm2 = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm2.getEvent_slug(), "demo") - assertEquals(rlm2.getLast_modified(), "timestamp1") - assertEquals(rlm2.getStatus(), "complete") + val rlm2 = db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm2.event_slug, "demo") + assertEquals(rlm2.last_modified, "timestamp1") + assertEquals(rlm2.status, "complete") } @Test @@ -199,14 +197,14 @@ class OrderSyncTest : BaseDatabaseTest() { 
assertEquals("%page3?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true", fakeApi.lastRequestUrl) - assertEquals(2, dataStore.count(Order::class.java).get().value()) - assertEquals(5, dataStore.count(OrderPosition::class.java).get().value()) - assertEquals(3, dataStore.count(CheckIn::class.java).get().value()) + assertEquals(2L, db.orderQueries.count().executeAsOne()) + assertEquals(5L, db.orderPositionQueries.count().executeAsOne()) + assertEquals(3L, db.checkInQueries.count().executeAsOne()) - val rlm = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm.getEvent_slug(), "demo") - assertEquals(rlm.getLast_modified(), "timestamp1") - assertEquals(rlm.getStatus(), "incomplete:2019-01-01T00:11:30Z") + val rlm = db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm.event_slug, "demo") + assertEquals(rlm.last_modified, "timestamp1") + assertEquals(rlm.status, "incomplete:2019-01-01T00:11:30Z") fakeApi.fetchResponses.add { @@ -228,12 +226,12 @@ class OrderSyncTest : BaseDatabaseTest() { } assertEquals("%page4?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true&ordering=datetime&created_since=2019-01-01T00%3A11%3A30Z", fakeApi.lastRequestUrl) - assertEquals(3, dataStore.count(Order::class.java).get().value()) + assertEquals(3L, db.orderQueries.count().executeAsOne()) - val rlm3 = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm3.getEvent_slug(), "demo") - assertEquals(rlm3.getLast_modified(), "timestamp1") - assertEquals(rlm3.getStatus(), "incomplete:2019-01-01T00:15:15Z") + val rlm3 = 
db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm3.event_slug, "demo") + assertEquals(rlm3.last_modified, "timestamp1") + assertEquals(rlm3.status, "incomplete:2019-01-01T00:15:15Z") fakeApi.fetchResponses.add { @@ -249,12 +247,12 @@ class OrderSyncTest : BaseDatabaseTest() { osa.download() assertEquals("http://1.1.1.1/api/v1/organizers/demo/events/demo/orders/?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true&ordering=datetime&created_since=2019-01-01T00%3A15%3A15Z", fakeApi.lastRequestUrl) - assertEquals(4, dataStore.count(Order::class.java).get().value()) + assertEquals(4L, db.orderQueries.count().executeAsOne()) - val rlm2 = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm2.getEvent_slug(), "demo") - assertEquals(rlm2.getLast_modified(), "timestamp1") - assertEquals(rlm2.getStatus(), "complete") + val rlm2 = db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm2.event_slug, "demo") + assertEquals(rlm2.last_modified, "timestamp1") + assertEquals(rlm2.status, "complete") } @Test @@ -273,9 +271,9 @@ class OrderSyncTest : BaseDatabaseTest() { } osa.download() - assertEquals(4, dataStore.count(Order::class.java).get().value()) - assertEquals(9, dataStore.count(OrderPosition::class.java).get().value()) - assertEquals(5, dataStore.count(CheckIn::class.java).get().value()) + assertEquals(4L, db.orderQueries.count().executeAsOne()) + assertEquals(9L, db.orderPositionQueries.count().executeAsOne()) + assertEquals(5L, db.checkInQueries.count().executeAsOne()) fakeApi.fetchResponses.add { val respdata = JSONObject() @@ -290,12 +288,12 @@ class OrderSyncTest : BaseDatabaseTest() { osa.download() 
assertEquals("http://1.1.1.1/api/v1/organizers/demo/events/demo/orders/?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true&ordering=-last_modified&modified_since=timestamp1", fakeApi.lastRequestUrl) - assertEquals(11, dataStore.count(OrderPosition::class.java).get().value()) - assertEquals(6, dataStore.count(CheckIn::class.java).get().value()) - val rlm = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm.getEvent_slug(), "demo") - assertEquals(rlm.getLast_modified(), "timestamp2") - assertEquals(rlm.getStatus(), "complete") + assertEquals(11L, db.orderPositionQueries.count().executeAsOne()) + assertEquals(6L, db.checkInQueries.count().executeAsOne()) + val rlm = db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm.event_slug, "demo") + assertEquals(rlm.last_modified, "timestamp2") + assertEquals(rlm.status, "complete") } @Test @@ -314,8 +312,8 @@ class OrderSyncTest : BaseDatabaseTest() { } osa.download() - assertEquals(4, dataStore.count(Order::class.java).get().value()) - assertEquals(5, dataStore.count(CheckIn::class.java).get().value()) + assertEquals(4L, db.orderQueries.count().executeAsOne()) + assertEquals(5L, db.checkInQueries.count().executeAsOne()) fakeApi.fetchResponses.add { val respdata = JSONObject() @@ -335,13 +333,13 @@ class OrderSyncTest : BaseDatabaseTest() { } assertEquals("%page2?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true&ordering=-last_modified&modified_since=timestamp1", fakeApi.lastRequestUrl) - assertEquals(4, dataStore.count(Order::class.java).get().value()) - assertEquals(6, dataStore.count(CheckIn::class.java).get().value()) + assertEquals(4L, 
db.orderQueries.count().executeAsOne()) + assertEquals(6L, db.checkInQueries.count().executeAsOne()) - val rlm = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm.getEvent_slug(), "demo") - assertEquals(rlm.getLast_modified(), "timestamp1") - assertEquals(rlm.getStatus(), "complete") + val rlm = db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm.event_slug, "demo") + assertEquals(rlm.last_modified, "timestamp1") + assertEquals(rlm.status, "complete") fakeApi.fetchResponses.add { val respdata = JSONObject() @@ -356,11 +354,11 @@ class OrderSyncTest : BaseDatabaseTest() { osa.download() assertEquals("http://1.1.1.1/api/v1/organizers/demo/events/demo/orders/?testmode=false&exclude=downloads&exclude=payment_date&exclude=payment_provider&exclude=fees&exclude=positions.downloads&exclude=payments&exclude=refunds&pdf_data=true&ordering=-last_modified&modified_since=timestamp1", fakeApi.lastRequestUrl) - assertEquals(11, dataStore.count(OrderPosition::class.java).get().value()) - assertEquals(6, dataStore.count(CheckIn::class.java).get().value()) - val rlm2 = dataStore.select(ResourceSyncStatus::class.java).where(ResourceSyncStatus.RESOURCE.eq("orders_withpdfdata")).get().first() - assertEquals(rlm2.getEvent_slug(), "demo") - assertEquals(rlm2.getLast_modified(), "timestamp2") - assertEquals(rlm2.getStatus(), "complete") + assertEquals(11L, db.orderPositionQueries.count().executeAsOne()) + assertEquals(6L, db.checkInQueries.count().executeAsOne()) + val rlm2 = db.resourceSyncStatusQueries.selectByResource("orders_withpdfdata").executeAsList().first() + assertEquals(rlm2.event_slug, "demo") + assertEquals(rlm2.last_modified, "timestamp2") + assertEquals(rlm2.status, "complete") } } diff --git a/libpretixsync/src/testFixtures/java/eu/pretix/pretixscan/scanproxy/tests/db/BaseDatabaseTest.java 
b/libpretixsync/src/testFixtures/java/eu/pretix/pretixscan/scanproxy/tests/db/BaseDatabaseTest.java deleted file mode 100644 index 0cbd3187..00000000 --- a/libpretixsync/src/testFixtures/java/eu/pretix/pretixscan/scanproxy/tests/db/BaseDatabaseTest.java +++ /dev/null @@ -1,74 +0,0 @@ -package eu.pretix.pretixscan.scanproxy.tests.db; - -import eu.pretix.libpretixsync.db.*; -import eu.pretix.libpretixsync.Models; -import io.requery.Persistable; -import io.requery.cache.EntityCacheBuilder; -import io.requery.sql.Configuration; -import io.requery.sql.ConfigurationBuilder; -import io.requery.sql.EntityDataStore; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.TestName; -import org.sqlite.SQLiteConfig; -import org.sqlite.SQLiteDataSource; - -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.sql.Connection; -import java.sql.SQLException; -import java.util.Formatter; -import java.util.Random; - - -public abstract class BaseDatabaseTest { - - @Rule - public TestName name = new TestName(); - - protected EntityDataStore dataStore; - private Connection connection; - - private static String byteArray2Hex(final byte[] hash) { - Formatter formatter = new Formatter(); - for (byte b : hash) { - formatter.format("%02x", b); - } - return formatter.toString(); - } - - @Before - public void setUpDataStore() throws SQLException, NoSuchAlgorithmException { - byte[] randomBytes = new byte[32]; // length is bounded by 7 - new Random().nextBytes(randomBytes); - MessageDigest md = MessageDigest.getInstance("SHA-1"); - md.update(name.getMethodName().getBytes()); - md.update(randomBytes); - String dbname = byteArray2Hex(md.digest()); - - SQLiteDataSource dataSource = new SQLiteDataSource(); - dataSource.setUrl("jdbc:sqlite:file:" + dbname + "?mode=memory&cache=shared"); - SQLiteConfig config = new SQLiteConfig(); - config.setDateClass("TEXT"); - dataSource.setConfig(config); - 
dataSource.setEnforceForeignKeys(true); - Migrations.migrate(dataSource, true); - connection = dataSource.getConnection(); - - Configuration configuration = new ConfigurationBuilder(dataSource, Models.DEFAULT) - .useDefaultLogging() - .setEntityCache(new EntityCacheBuilder(Models.DEFAULT) - .useReferenceCache(false) - .useSerializableCache(false) - .build()) - .build(); - dataStore = new EntityDataStore<>(configuration); - } - - @After - public void tearDownDataStore() throws Exception { - dataStore.close(); - connection.close(); - } -} \ No newline at end of file diff --git a/libpretixsync/src/testFixtures/resources/events/event1.json b/libpretixsync/src/testFixtures/resources/events/event1.json index 48cd2efa..d4538ded 100644 --- a/libpretixsync/src/testFixtures/resources/events/event1.json +++ b/libpretixsync/src/testFixtures/resources/events/event1.json @@ -27,5 +27,6 @@ "LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUNvd0JRWURLMlZ3QXlFQTdBRDcvdkZBMzNFc1k0ejJQSHI3aVpQc1o4bjVkaDBhalA4Z3l6Tm1tSXM9Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQo=", "LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUNvd0JRWURLMlZ3QXlFQVlNSnNPbkxwNUFXR3BKK2RGSkUycjlET1hlUUlaZ1ZyQ3Q5M0xBbDVCTjg9Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQo=" ] - } + }, + "seat_category_mapping": {} } \ No newline at end of file diff --git a/libpretixsync/src/testFixtures/resources/events/event2.json b/libpretixsync/src/testFixtures/resources/events/event2.json index e14c2037..daecd997 100644 --- a/libpretixsync/src/testFixtures/resources/events/event2.json +++ b/libpretixsync/src/testFixtures/resources/events/event2.json @@ -24,5 +24,6 @@ ], "valid_keys": { "pretix_sig1": ["LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUNvd0JRWURLMlZ3QXlFQWJWYmZxZGdtaFZzSFFmTk15eko5eUYxbGdWeWdkcE5oS29vdlE1NHhDWTQ9Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQo="] - } + }, + "seat_category_mapping": {} } \ No newline at end of file diff --git a/libpretixsync/src/testFixtures/resources/items/item1.json b/libpretixsync/src/testFixtures/resources/items/item1.json index 
d21a5e0c..69cae90e 100644 --- a/libpretixsync/src/testFixtures/resources/items/item1.json +++ b/libpretixsync/src/testFixtures/resources/items/item1.json @@ -32,5 +32,6 @@ "bundles": [], "original_price": null, "require_approval": false, - "generate_tickets": null + "generate_tickets": null, + "issue_giftcard": false } diff --git a/libpretixsync/src/testFixtures/resources/items/item2.json b/libpretixsync/src/testFixtures/resources/items/item2.json index 74c67dae..eea55468 100644 --- a/libpretixsync/src/testFixtures/resources/items/item2.json +++ b/libpretixsync/src/testFixtures/resources/items/item2.json @@ -32,5 +32,6 @@ "bundles": [], "original_price": null, "require_approval": false, - "generate_tickets": null + "generate_tickets": null, + "issue_giftcard": false } \ No newline at end of file diff --git a/libpretixsync/src/testFixtures/resources/questions/question1.json b/libpretixsync/src/testFixtures/resources/questions/question1.json index baa59610..fcda6f41 100644 --- a/libpretixsync/src/testFixtures/resources/questions/question1.json +++ b/libpretixsync/src/testFixtures/resources/questions/question1.json @@ -11,6 +11,7 @@ "options": [], "position": 0, "ask_during_checkin": true, + "show_during_checkin": true, "identifier": "ABTBAB8S", "dependency_question": null, "dependency_value": null diff --git a/libpretixsync/versions.gradle b/libpretixsync/versions.gradle index d4bea4c9..ef3b1659 100644 --- a/libpretixsync/versions.gradle +++ b/libpretixsync/versions.gradle @@ -11,4 +11,5 @@ ext.eddsa_version = '0.3.0' ext.protobuf_version = '3.21.1' ext.jsr250_version = '1.0' ext.junit_version = '4.13.2' -ext.sqlite_jdbc_version = '3.36.0.1' \ No newline at end of file +ext.sqlite_jdbc_version = '3.45.2.0' // Should match org.xerial:sqlite-jdbc version used by SQLDelight +ext.sqldelight_version = '2.0.2'