diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..ce7dcfe8 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,26 @@ +# EditorConfig helps developers define and maintain consistent +# coding styles between different editors and IDEs +# editorconfig.org + +root = true + +[*] +# Change these settings to your own preference +indent_style = space +indent_size = 4 + +# We recommend you to keep these unchanged +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.{kt,kts}] +ktlint_standard_no-wildcard-imports = disabled + +[*.md] +trim_trailing_whitespace = false + +[*.{json,yaml,yml}] +indent_style = space +indent_size = 2 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9e9e0d2f..0bb5810c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -33,3 +33,12 @@ jobs: # Gradle check - name: Check run: ./gradlew check + + - name: Check version + run: | + projectVersion=$(./gradlew properties | grep '^version:.*$') + pluginVersion=$(./gradlew :radar-commons-gradle:properties | grep '^version:.*$') + if [ "$projectVersion" != "$pluginVersion" ]; then + echo "Project version $projectVersion does not match plugin version $pluginVersion" + exit 1 + fi diff --git a/.github/workflows/publish_snapshots.yml b/.github/workflows/publish_snapshots.yml index 0467415f..0262d401 100644 --- a/.github/workflows/publish_snapshots.yml +++ b/.github/workflows/publish_snapshots.yml @@ -17,10 +17,6 @@ jobs: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - uses: actions/checkout@v3 - - name: Has SNAPSHOT version - id: is-snapshot - run: grep "version = '.*-SNAPSHOT'" build.gradle - - uses: actions/setup-java@v3 with: distribution: temurin @@ -29,6 +25,9 @@ jobs: - name: Setup Gradle uses: gradle/gradle-build-action@v2 + - name: Has SNAPSHOT version + run: ./gradlew properties | grep '^version:.*-SNAPSHOT$' + - name: Install gpg secret key run: | 
cat <(echo -e "${{ secrets.OSSRH_GPG_SECRET_KEY }}") | gpg --batch --import @@ -39,3 +38,11 @@ jobs: OSSRH_USER: ${{ secrets.OSSRH_USER }} OSSRH_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} run: ./gradlew -Psigning.gnupg.keyName=CBEF2CF0 -Psigning.gnupg.executable=gpg -Psigning.gnupg.passphrase=${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }} publish + + - name: Plugin has SNAPSHOT version + run: ./gradlew :radar-commons-gradle:properties | grep '^version:.*-SNAPSHOT$' + + - name: Publish gradle plugin + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: ./gradlew :radar-commons-gradle:publish diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f0686b60..591f1c8b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -23,6 +23,15 @@ jobs: - name: Setup Gradle uses: gradle/gradle-build-action@v2 + - name: Check version + run: | + projectVersion=$(./gradlew properties | grep '^version:.*$') + pluginVersion=$(./gradlew :radar-commons-gradle:properties | grep '^version:.*$') + if [ "$projectVersion" != "$pluginVersion" ]; then + echo "Project version $projectVersion does not match plugin version $pluginVersion" + exit 1 + fi + # Compile code - name: Compile code run: ./gradlew assemble @@ -44,3 +53,8 @@ jobs: OSSRH_USER: ${{ secrets.OSSRH_USER }} OSSRH_PASSWORD: ${{ secrets.OSSRH_PASSWORD }} run: ./gradlew -Psigning.gnupg.keyName=CBEF2CF0 -Psigning.gnupg.executable=gpg -Psigning.gnupg.passphrase=${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }} publish closeAndReleaseSonatypeStagingRepository + + - name: Publish gradle plugin + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: ./gradlew :radar-commons-gradle:publish diff --git a/.github/workflows/scheduled_snyk.yaml b/.github/workflows/scheduled_snyk.yaml index 965cbfb1..b4b2e617 100644 --- a/.github/workflows/scheduled_snyk.yaml +++ b/.github/workflows/scheduled_snyk.yaml @@ -2,16 +2,21 @@ name: Snyk scheduled test on: schedule: - cron: '0 2 * * 1' + push: + 
branches: + - master + jobs: security: runs-on: ubuntu-latest env: REPORT_FILE: test.json + steps: - uses: actions/checkout@v3 - uses: snyk/actions/setup@master with: - snyk-version: v1.931.0 + snyk-version: v1.1032.0 - uses: actions/setup-java@v3 with: @@ -28,13 +33,15 @@ jobs: snyk test --all-sub-projects --configuration-matching='^runtimeClasspath$' + --fail-on=upgradable --json-file-output=${{ env.REPORT_FILE }} --org=radar-base + --policy-path=$PWD/.snyk - name: Report new vulnerabilities uses: thehyve/report-vulnerability@master + if: success() || failure() with: report-file: ${{ env.REPORT_FILE }} env: TOKEN: ${{ secrets.GITHUB_TOKEN }} - if: ${{ failure() }} diff --git a/.github/workflows/snyk.yaml b/.github/workflows/snyk.yaml index de55c9f2..6169f72d 100644 --- a/.github/workflows/snyk.yaml +++ b/.github/workflows/snyk.yaml @@ -3,6 +3,7 @@ on: pull_request: branches: - master + jobs: security: runs-on: ubuntu-latest @@ -10,7 +11,7 @@ jobs: - uses: actions/checkout@v3 - uses: snyk/actions/setup@master with: - snyk-version: v1.931.0 + snyk-version: v1.1032.0 - uses: actions/setup-java@v3 with: @@ -27,6 +28,5 @@ jobs: snyk test --all-sub-projects --configuration-matching='^runtimeClasspath$' - --fail-on=upgradable --org=radar-base - --severity-threshold=high + --policy-path=$PWD/.snyk diff --git a/README.md b/README.md index d2cae214..6ab1c972 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,10 @@ # RADAR-Commons -[![Build Status](https://travis-ci.org/RADAR-base/radar-commons.svg?branch=master)](https://travis-ci.org/RADAR-base/radar-commons) -[![Codacy Badge](https://api.codacy.com/project/badge/Grade/9fe7a419c83e4798af671e468c7e91cf)](https://www.codacy.com/app/RADAR-base/radar-commons?utm_source=github.com&utm_medium=referral&utm_content=RADAR-base/radar-commons&utm_campaign=Badge_Grade) Common utilities library containing basic schemas, streaming features, testing bridges and utils. 
# Usage -Add the RADAR-Commons library to your project with Gradle by updating your `build.gradle` file with: +Add the RADAR-Commons library to your project with Gradle by updating your `build.gradle.kts` file with: ```gradle repositories { @@ -14,62 +12,57 @@ repositories { } dependencies { - implementation group: 'org.radarbase', name: 'radar-commons', version: '0.15.0' + implementation("org.radarbase:radar-commons:1.0.0") } ``` Example use, after adding [`radar-schemas`](https://github.com/radar-base/radar-schemas) to classpath: + ```kotlin // Set URLs for RADAR-base installation -val baseUrl = "..." -val kafkaUrl = "$baseUrl/kafka/" -val schemaUrl = "$baseUrl/schema/" -val oauthHeaders = ... -val key = ObservationKey("myProject", "myUser", "mySource") - -// Configure RADAR-base clients -val client = RestClient.global().apply { - server(ServerConfig(kafkaUrl)) - gzipCompression(true) -}.build() +val baseUrl = "https://..." +val oauthToken = ... + +val kafkaSender = restKafkaSender { + baseUrl = "$baseUrl/kafka/" + headers.append("Authorization", "Bearer $oauthToken") + httpClient { + timeout(10.seconds) + } + schemaRetriever ("$baseUrl/schema/") +} -val schemaRetriever = SchemaRetriever(ServerConfig(schemaUrl), 30) +// Configure topic to send data over +val topic = AvroTopic( + "linux_raspberry_temperature", + ObservationKey.getClassSchema(), + RaspberryTemperature.getClassSchema(), + ObservationKey::class.java, + RaspberryTemperature::class.java +) -val restSender = RestSender.Builder().apply { - httpClient(client) - schemaRetriever(schemaRetriever) - useBinaryContent(true) - headers(oauthHeaders) -}.build() +val topicSender = kafkaSender.sender(topic) -val sender = BatchedKafkaSender(restSender, 60_000L, 1000L) +val key = ObservationKey("myProject", "myUser", "mySource") -// Configure topic to send data over -val topic = AvroTopic("linux_raspberry_temperature", - ObservationKey.getClassSchema(), RaspberryTemperature.getClassSchema(), - 
ObservationKey::class.java, RaspberryTemperature::class.java) - -// Send data to topic. Be sure to close -// the sender after use. Preferably, a sender is reused -// for many observations so that requests are efficiently -// batched. -sender.sender(topic).use { topicSender -> - readValuesFromSystem() { value -> - topicSender.send(key, value) - } +// Send data to topic. +runBlocking { + val values: List = readValuesFromSystem() + topicSender.send(key, values) } ``` -Note that this code above does not include any flows for registering a source with the managmentportal. + +Note that this code above does not include any flows for registering a source with the ManagementPortal. For server utilities, include `radar-commons-server`: ```gradle repositories { mavenCentral() - maven { url 'https://packages.confluent.io/maven/' } + maven(url = "https://packages.confluent.io/maven/") } dependencies { - implementation group: 'org.radarbase', name: 'radar-commons-server', version: '0.15.0' + implementation("org.radarbase:radar-commons-server:1.0.0") } ``` @@ -78,11 +71,11 @@ For mocking clients of the RADAR-base infrastructure, use that 'radar-commons-te ```gradle repositories { mavenCentral() - maven { url 'https://packages.confluent.io/maven/' } + maven(url = "https://packages.confluent.io/maven/") } dependencies { - testImplementation group: 'org.radarbase', name: 'radar-commons-testing', version: '0.15.0' + testImplementation("org.radarbase:radar-commons-testing:1.0.0") } ``` @@ -92,24 +85,26 @@ To test your backend with a MockProducer, copy `testing/mock.yml.template` to `t ``` to send data to your backend. +To use the RADAR Gradle plugins, see the README of the `radar-commons-gradle` directory. + ## Contributing For latest code use `dev` branch. This is released on JFrog's OSS Artifactory. To use that release, add the following fragment to your `build.gradle` file. 
```gradle repositories { - maven { url 'https://oss.sonatype.org/content/repositories/snapshots' } + maven(url = "https://oss.sonatype.org/content/repositories/snapshots") } configurations.all { // Check for updates every build - resolutionStrategy.cacheChangingModulesFor 0, 'SECONDS' + resolutionStrategy.cacheChangingModulesFor(0, "SECONDS") } dependencies { - implementation group: 'org.radarbase', name: 'radar-commons', version: '0.15.1-SNAPSHOT' + implementation("org.radarbase:radar-commons:1.0.1-SNAPSHOT") } ``` -Code should be formatted using the [Google Java Code Style Guide](https://google.github.io/styleguide/javaguide.html). +Code should be formatted using the Kotlin official style guide, in addition to ktlint rules. If you want to contribute a feature or fix browse our [issues](https://github.com/RADAR-base/radar-commons/issues), and please make a pull request. diff --git a/build.gradle b/build.gradle deleted file mode 100644 index 524336a7..00000000 --- a/build.gradle +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -plugins { - id 'com.github.davidmc24.gradle.plugin.avro' version '1.3.0' - id("io.github.gradle-nexus.publish-plugin") version "1.1.0" - id("com.github.ben-manes.versions") version "0.42.0" -} - -allprojects { - version = '0.15.0' - group = 'org.radarbase' -} - -subprojects { - // Apply the plugins - apply plugin: 'java' - apply plugin: 'java-library' - apply plugin: 'idea' - - targetCompatibility = '11' - sourceCompatibility = '11' - - //---------------------------------------------------------------------------// - // Configuration // - //---------------------------------------------------------------------------// - ext.githubRepoName = 'RADAR-base/radar-commons' - ext.githubUrl = "https://github.com/$githubRepoName" - ext.issueUrl = "https://github.com/$githubRepoName/issues" - ext.website = 'https://radar-base.org' - - //---------------------------------------------------------------------------// - // Dependencies // - //---------------------------------------------------------------------------// - repositories { - mavenCentral() - maven { url 'https://packages.confluent.io/maven/' } - flatDir { - dirs "${project.rootDir}/libs" - } - } - - idea { - module { - downloadSources = true - } - } - - //---------------------------------------------------------------------------// - // Style checking // - //---------------------------------------------------------------------------// - - tasks.matching { it instanceof Test }.all { - def stdout = new LinkedList() - beforeTest { TestDescriptor td -> - stdout.clear() - } - - onOutput { TestDescriptor td, TestOutputEvent toe -> - stdout.addAll(Arrays.asList(toe.getMessage().split('(?m)$'))) - while (stdout.size() > 100) { - stdout.remove() - } - } - - afterTest { TestDescriptor td, TestResult tr -> - if (tr.resultType == TestResult.ResultType.FAILURE) { - println() - print("${td.className}.${td.name} FAILED") - if (stdout.empty) { - println(" without any output") - } else { - println(" with last 100 lines of 
output:") - println('=' * 100) - stdout.each { print(it) } - println('=' * 100) - } - } - } - - testLogging { - showExceptions = true - showCauses = true - showStackTraces = true - exceptionFormat "full" - } - } -} - -def isNonStable = { String version -> - def stableKeyword = ["RELEASE", "FINAL", "GA"].any { version.toUpperCase().contains(it) } - def regex = /^[0-9,.v-]+(-r)?$/ - return !stableKeyword && !(version ==~ regex) -} - -tasks.named("dependencyUpdates").configure { - rejectVersionIf { - isNonStable(it.candidate.version) - } -} - -nexusPublishing { - repositories { - sonatype { - username = project.hasProperty("ossrh.user") ? project.property("ossrh.user") : System.getenv("OSSRH_USER") - password = project.hasProperty("ossrh.password") ? project.property("ossrh.password") : System.getenv("OSSRH_PASSWORD") - } - } -} - -wrapper { - gradleVersion '7.4.2' -} diff --git a/build.gradle.kts b/build.gradle.kts new file mode 100644 index 00000000..edaed552 --- /dev/null +++ b/build.gradle.kts @@ -0,0 +1,73 @@ +import org.radarbase.gradle.plugin.radarKotlin +import org.radarbase.gradle.plugin.radarPublishing + +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +plugins { + kotlin("plugin.serialization") version Versions.Plugins.kotlinSerialization apply false + id("com.github.davidmc24.gradle.plugin.avro") version Versions.Plugins.avro apply false + id("org.radarbase.radar-root-project") + id("org.radarbase.radar-dependency-management") + id("org.radarbase.radar-kotlin") apply false + id("org.radarbase.radar-publishing") apply false +} + +val githubRepoName = "RADAR-base/radar-commons" +val githubUrl = "https://github.com/$githubRepoName" + +radarRootProject { + projectVersion.set(Versions.project) +} + +subprojects { + // Apply the plugins + apply(plugin = "org.radarbase.radar-kotlin") + apply(plugin = "org.radarbase.radar-publishing") + + dependencies { + configurations["testImplementation"]("org.jetbrains.kotlinx:kotlinx-coroutines-test:${Versions.coroutines}") + configurations["testRuntimeOnly"]("org.slf4j:slf4j-simple:${Versions.slf4j}") + } + + radarPublishing { + githubUrl.set("https://github.com/$githubRepoName") + developers { + developer { + id.set("blootsvoets") + name.set("Joris Borgdorff") + email.set("joris@thehyve.nl") + organization.set("The Hyve") + } + developer { + id.set("nivemaham") + name.set("Nivethika Mahasivam") + email.set("nivethika@thehyve.nl") + organization.set("The Hyve") + } + } + } + + radarKotlin { + javaVersion.set(Versions.java) + kotlinVersion.set(Versions.Plugins.kotlin) + junitVersion.set(Versions.junit) + slf4jVersion.set(Versions.slf4j) + } + + //---------------------------------------------------------------------------// + // Style checking // + //---------------------------------------------------------------------------// +} diff --git a/buildSrc/build.gradle.kts b/buildSrc/build.gradle.kts new file mode 100644 index 00000000..876c922b --- /dev/null +++ b/buildSrc/build.gradle.kts @@ -0,0 +1,7 @@ +plugins { + `kotlin-dsl` +} + +repositories { + mavenCentral() +} diff --git a/buildSrc/src/main/kotlin/Versions.kt b/buildSrc/src/main/kotlin/Versions.kt new file mode 100644 
index 00000000..44592243 --- /dev/null +++ b/buildSrc/src/main/kotlin/Versions.kt @@ -0,0 +1,25 @@ +object Versions { + const val project = "1.0.0" + + object Plugins { + const val kotlin = "1.8.21" + const val kotlinSerialization = kotlin + const val avro = "1.7.0" + } + + const val java = 11 + const val slf4j = "2.0.7" + const val confluent = "7.3.2" + const val kafka = "7.3.2-ce" + const val avro = "1.11.1" + const val jackson = "2.15.0" + const val okhttp = "4.11.0" + const val junit = "5.9.3" + const val mockito = "5.3.1" + const val mockitoKotlin = "4.1.0" + const val hamcrest = "2.2" + const val radarSchemas = "0.8.2" + const val opencsv = "5.7.1" + const val ktor = "2.3.0" + const val coroutines = "1.6.4" +} diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml deleted file mode 100644 index a252c58c..00000000 --- a/config/checkstyle/checkstyle.xml +++ /dev/null @@ -1,233 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/config/pmd/ruleset.xml b/config/pmd/ruleset.xml deleted file mode 100644 index 1157371b..00000000 --- a/config/pmd/ruleset.xml +++ /dev/null @@ -1,98 +0,0 @@ - - - - - This ruleset was parsed from the Codacy default codestyle. 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/gradle.properties b/gradle.properties index dd09c477..821e1274 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,12 +1 @@ -slf4jVersion=1.7.36 -confluentVersion=7.1.1 -kafkaVersion=3.1.0 -avroVersion=1.11.0 -jacksonVersion=2.13.2.20220328 -okhttpVersion=4.9.3 -junitVersion=4.13.2 -mockitoVersion=4.5.1 -hamcrestVersion=1.3 -radarSchemasVersion=0.7.9 -orgJsonVersion=20220320 -opencsvVersion=5.6 +org.gradle.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 diff --git a/gradle/codestyle.gradle b/gradle/codestyle.gradle deleted file mode 100644 index 9a24dbb0..00000000 --- a/gradle/codestyle.gradle +++ /dev/null @@ -1,24 +0,0 @@ -apply plugin: 'checkstyle' -apply plugin: 'pmd' - -checkstyle { - toolVersion = '9.2' - ignoreFailures = false - - configFile = rootProject.file('config/checkstyle/checkstyle.xml') - // ignore tests - sourceSets = [sourceSets.main] -} - -pmd { - toolVersion = '6.41.0' - ignoreFailures = false - - consoleOutput = true - - ruleSets = [] - ruleSetFiles = rootProject.files("config/pmd/ruleset.xml") - // ignore tests - sourceSets = [sourceSets.main] -} - diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 41d9927a..c1962a79 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index aa991fce..37aef8d3 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip 
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.1.1-bin.zip +networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 1b6c7873..aeb74cbb 100755 --- a/gradlew +++ b/gradlew @@ -55,7 +55,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -80,13 +80,10 @@ do esac done -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" +# This is normally unused +# shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -143,12 +140,16 @@ fi if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac case $MAX_FD in #( '' | soft) :;; #( *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -193,6 +194,10 @@ if "$cygwin" || "$msys" ; then done fi + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + # Collect all arguments for the java command; # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of # shell script including quotes and variable substitutions, so put them in @@ -205,6 +210,12 @@ set -- \ org.gradle.wrapper.GradleWrapperMain \ "$@" +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + # Use "xargs" to parse quoted args. # # With -n1 it outputs one arg per line, with the quotes and backslashes removed. diff --git a/gradlew.bat b/gradlew.bat index ac1b06f9..6689b85b 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -14,7 +14,7 @@ @rem limitations under the License. @rem -@if "%DEBUG%" == "" @echo off +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -25,7 +25,8 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @@ -40,7 +41,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute +if %ERRORLEVEL% equ 0 goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -75,13 +76,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! 
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal diff --git a/radar-commons-gradle/README.md b/radar-commons-gradle/README.md new file mode 100644 index 00000000..47869a7c --- /dev/null +++ b/radar-commons-gradle/README.md @@ -0,0 +1,89 @@ +# radar-commons-gradle + +A Gradle plugin to do some common RADAR-base tasks. + +## Usage + +Add the following block to `settings.gradle.kts` to get access to the RADAR-base plugins. + +```gradle +pluginManagement { + repositories { + gradlePluginPortal() + mavenCentral() + maven(url = "https://maven.pkg.github.com/radar-base/radar-commons") { + credentials { + username = System.getenv("GITHUB_ACTOR") + ?: extra.properties["gpr.user"] as? String + ?: extra.properties["public.gpr.user"] as? String + password = System.getenv("GITHUB_TOKEN") + ?: extra.properties["gpr.token"] as? String + ?: (extra.properties["public.gpr.token"] as? String)?.let { + Base64.getDecoder().decode(it).decodeToString() + } + } + } + } +} +``` + +We recommend to store a Base64 encoded PAT in your projects' `gradle.properties` with only `read:packages` access, created in your [GitHub Developer settings](https://github.com/settings/tokens/new?scopes=read:packages&description=GPR%20for%20Gradle). The Base64 encoded token should be stored as `public.gpr.token` and the associated username as `public.gpr.user`. To use your personal PAT, store the PAT in `~/.gradle/gradle.properties` with keys `gpr.user` and `gpr.token`. Use the following PAT if needed +```properties +public.gpr.user=radar-public +public.gpr.token=Z2hwX0h0d0FHSmJzeEpjenBlUVIycVhWb0RpNGdZdHZnZzJTMFVJZA== +``` +Note that the above credentials may be changed or revoked at any time. 
+ +Then use the plugins with the following root project configurations: + +```gradle +import org.radarbase.gradle.plugin.radarKotlin +import org.radarbase.gradle.plugin.radarPublishing + +plugins { + val radarCommonsVersion = "..." + id("org.radarbase.radar-root-project") version radarCommonsVersion + id("org.radarbase.radar-dependency-management") version radarCommonsVersion + id("org.radarbase.radar-kotlin") version radarCommonsVersion apply false + id("org.radarbase.radar-publishing") version radarCommonsVersion apply false +} + +radarRootProject { + projectVersion.set(Versions.project) + group.set("org.radarbase") // is already default value + gradleVersion.set(Versions.gradle) // already has a default value +} + +radarDependencies { + regex.set("(^[0-9,.v-]+(-r)?|RELEASE|FINAL|GA|-CE)$") // default value + // default value, if set to true then disregard major version + // updates, e.g. 5.0.0 -> 6.0.0 is not allowed but 1.6.0 -> 1.7.0 is allowed. + rejectMajorVersionUpdates.set(false) +} + +subprojects { + apply(plugin = "org.radarbase.radar-kotlin") + apply(plugin = "org.radarbase.radar-publishing") + + radarKotlin { + javaVersion.set(Versions.java) // already has a default value + kotlinVersion.set(Versions.Plugins.kotlin) // already has a default value + junitVersion.set(Versions.junit) // already has a default value + ktlintVersion.set(Versions.ktlint) // already has a default value + } + + // Both values are required to be set to use radar-publishing. + // This will force the use of GPG signing maven publications. 
+ radarPublishing { + githubUrl.set("https://github.com/RADAR-base/my-project") + developers { + developer { + id.set("myhandle") + name.set("My Name") + email.set("my@email.com") + organization.set("My company") + } + } + } +} +``` diff --git a/radar-commons-gradle/build.gradle.kts b/radar-commons-gradle/build.gradle.kts new file mode 100644 index 00000000..49e540e2 --- /dev/null +++ b/radar-commons-gradle/build.gradle.kts @@ -0,0 +1,70 @@ +import org.jetbrains.kotlin.gradle.tasks.KotlinCompile + +plugins { + `kotlin-dsl` + `java-gradle-plugin` + kotlin("jvm") version "1.8.10" + `maven-publish` +} + +version = "1.0.0" +group = "org.radarbase" +description = "RADAR common Gradle plugins" + +repositories { + mavenCentral() + gradlePluginPortal() +} + +dependencies { + implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.8.10") + implementation("org.jetbrains.dokka:dokka-gradle-plugin:1.8.10") + implementation("com.github.ben-manes:gradle-versions-plugin:0.46.0") + implementation("io.github.gradle-nexus:publish-plugin:1.3.0") + implementation("org.jlleitschuh.gradle:ktlint-gradle:11.3.2") +} + +gradlePlugin { + plugins { + create("radarRootProject") { + id = "org.radarbase.radar-root-project" + implementationClass = "org.radarbase.gradle.plugin.RadarRootProjectPlugin" + } + create("radarPublishing") { + id = "org.radarbase.radar-publishing" + implementationClass = "org.radarbase.gradle.plugin.RadarPublishingPlugin" + } + create("radarDependencyManagement") { + id = "org.radarbase.radar-dependency-management" + implementationClass = "org.radarbase.gradle.plugin.RadarDependencyManagementPlugin" + } + create("radarKotlin") { + id = "org.radarbase.radar-kotlin" + implementationClass = "org.radarbase.gradle.plugin.RadarKotlinPlugin" + } + } +} + +tasks.withType { + options.release.set(11) +} +tasks.withType { + kotlinOptions { + jvmTarget = "11" + } +} + +publishing { + repositories { + maven { + name = "GitHubPackages" + 
setUrl("https://maven.pkg.github.com/radar-base/radar-commons") + credentials { + username = System.getenv("GITHUB_ACTOR") + ?: extra.properties["gpr.user"] as? String + password = System.getenv("GITHUB_TOKEN") + ?: extra.properties["gpr.key"] as? String + } + } + } +} diff --git a/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarDependencyManagementPlugin.kt b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarDependencyManagementPlugin.kt new file mode 100644 index 00000000..8baf32b6 --- /dev/null +++ b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarDependencyManagementPlugin.kt @@ -0,0 +1,48 @@ +package org.radarbase.gradle.plugin + +import com.github.benmanes.gradle.versions.VersionsPlugin +import com.github.benmanes.gradle.versions.updates.DependencyUpdatesTask +import org.gradle.api.Plugin +import org.gradle.api.Project +import org.gradle.api.artifacts.repositories.MavenArtifactRepository +import org.gradle.api.provider.Property +import org.gradle.kotlin.dsl.apply +import org.gradle.kotlin.dsl.configure +import org.gradle.kotlin.dsl.create + +fun Project.radarDependencyManagement(block: RadarDependencyManagementExtension.() -> Unit) { + configure(block) +} + +interface RadarDependencyManagementExtension { + val regex: Property + val rejectMajorVersionUpdates: Property +} + +class RadarDependencyManagementPlugin : Plugin { + override fun apply(project: Project): Unit = with(project) { + val extension = extensions.create("radarDependencies").apply { + regex.convention("(^[0-9,.v-]+(-r)?|RELEASE|FINAL|GA|-CE)$") + rejectMajorVersionUpdates.convention(false) + } + + apply() + + tasks.withType(DependencyUpdatesTask::class.java) { + doFirst { + allprojects { + repositories.removeAll { + it is MavenArtifactRepository && + it.url.toString().contains("snapshot", ignoreCase = true) + } + } + } + val isStable = extension.regex.get().toRegex(RegexOption.IGNORE_CASE) + val rejectMajorVersionUpdates = 
extension.rejectMajorVersionUpdates.get() + rejectVersionIf { + (!rejectMajorVersionUpdates || candidate.version.split('.', limit = 2)[0] != currentVersion.split('.', limit = 2)[0]) + && !isStable.containsMatchIn(candidate.version) + } + } + } +} diff --git a/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarKotlinPlugin.kt b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarKotlinPlugin.kt new file mode 100644 index 00000000..eff4de65 --- /dev/null +++ b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarKotlinPlugin.kt @@ -0,0 +1,167 @@ +package org.radarbase.gradle.plugin + +import org.gradle.api.Plugin +import org.gradle.api.Project +import org.gradle.api.plugins.ApplicationPlugin +import org.gradle.api.plugins.JavaApplication +import org.gradle.api.provider.Property +import org.gradle.api.tasks.Copy +import org.gradle.api.tasks.compile.JavaCompile +import org.gradle.api.tasks.testing.Test +import org.gradle.api.tasks.testing.logging.TestExceptionFormat +import org.gradle.kotlin.dsl.* +import org.jetbrains.kotlin.gradle.dsl.JvmTarget +import org.jetbrains.kotlin.gradle.dsl.KotlinVersion +import org.jetbrains.kotlin.gradle.tasks.KotlinCompile +import org.jlleitschuh.gradle.ktlint.KtlintExtension +import org.jlleitschuh.gradle.ktlint.KtlintPlugin + +fun Project.radarKotlin(configure: RadarKotlinExtension.() -> Unit) { + configure(configure) +} + +interface RadarKotlinExtension { + val javaVersion: Property + val kotlinVersion: Property + val junitVersion: Property + val log4j2Version: Property + val slf4jVersion: Property + val ktlintVersion: Property +} + +class RadarKotlinPlugin : Plugin { + override fun apply(project: Project): Unit = with(project) { + val extension = extensions.create("radarKotlin").apply { + javaVersion.convention(Versions.java) + kotlinVersion.convention(Versions.kotlin) + junitVersion.convention(Versions.junit) + ktlintVersion.convention(Versions.ktlint) + 
// slf4jVersion has no default: slf4j-api is only added as a dependency when explicitly configured + } + + apply(plugin = "kotlin") + apply() + + repositories { + mavenCentral { + mavenContent { + releasesOnly() + } + } + mavenLocal() + maven(url = "https://packages.confluent.io/maven/") { + mavenContent { + releasesOnly() + } + } + maven(url = "https://oss.sonatype.org/content/repositories/snapshots") { + mavenContent { + snapshotsOnly() + } + } + } + + tasks.withType { + options.release.set(extension.javaVersion) + } + + tasks.withType { + compilerOptions { + jvmTarget.set(extension.javaVersion.map { JvmTarget.fromTarget(it.toString()) }) + val kotlinVersion = extension.kotlinVersion.map { version -> + KotlinVersion.fromVersion( + version + .splitToSequence('.') + .take(2) + .joinToString(separator = "."), + ) + } + apiVersion.set(kotlinVersion) + languageVersion.set(kotlinVersion) + } + } + + extensions.configure { + version.set(extension.ktlintVersion) + } + + dependencies { + configurations["testImplementation"](extension.junitVersion.map { "org.junit.jupiter:junit-jupiter-api:$it" }) + configurations["testRuntimeOnly"](extension.junitVersion.map { "org.junit.jupiter:junit-jupiter-engine:$it" }) + } + + tasks.withType { + testLogging { + events("passed", "skipped", "failed") + showStandardStreams = true + exceptionFormat = TestExceptionFormat.FULL + } + useJUnitPlatform() + } + + + tasks.register("downloadDependencies") { + doFirst { + configurations["compileClasspath"].files + configurations["runtimeClasspath"].files + println("Downloaded all dependencies") + } + outputs.upToDateWhen { false } + } + + tasks.register("copyDependencies") { + from(configurations.named("runtimeClasspath").map { it.files }) + into("$buildDir/third-party/") + doLast { + println("Copied third-party runtime dependencies") + } + } + + afterEvaluate { + if (extension.slf4jVersion.isPresent) { + dependencies { + val implementation by configurations + implementation("org.slf4j:slf4j-api:${extension.slf4jVersion.get()}") + } + } + if 
(extension.log4j2Version.isPresent) { + dependencies { + val log4j2Version = extension.log4j2Version.get() + + if (plugins.hasPlugin("application")) { + val runtimeOnly by configurations + runtimeOnly("org.apache.logging.log4j:log4j-slf4j2-impl:$log4j2Version") + runtimeOnly("org.apache.logging.log4j:log4j-core:$log4j2Version") + runtimeOnly("org.apache.logging.log4j:log4j-jul:$log4j2Version") + } else { + val testRuntimeOnly by configurations + testRuntimeOnly("org.apache.logging.log4j:log4j-slf4j2-impl:$log4j2Version") + testRuntimeOnly("org.apache.logging.log4j:log4j-core:$log4j2Version") + testRuntimeOnly("org.apache.logging.log4j:log4j-jul:$log4j2Version") + } + } + + tasks.withType { + if ("java.util.logging.manager" !in systemProperties) { + systemProperty( + "java.util.logging.manager", + "org.apache.logging.log4j.jul.LogManager" + ) + } + } + + if (plugins.hasPlugin(ApplicationPlugin::class)) { + extensions.configure { + if (applicationDefaultJvmArgs.none { "-Djava.util.logging.manager=" in it }) { + applicationDefaultJvmArgs += "-Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager" + } + } + } + } + } + + configurations.named("implementation") { + resolutionStrategy.cacheChangingModulesFor(0, "SECONDS") + } + } +} diff --git a/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarPublishingPlugin.kt b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarPublishingPlugin.kt new file mode 100644 index 00000000..1093d576 --- /dev/null +++ b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarPublishingPlugin.kt @@ -0,0 +1,130 @@ +package org.radarbase.gradle.plugin + +import org.gradle.api.Plugin +import org.gradle.api.Project +import org.gradle.api.file.DuplicatesStrategy +import org.gradle.api.provider.Property +import org.gradle.api.publish.PublishingExtension +import org.gradle.api.publish.maven.MavenPomDeveloperSpec +import org.gradle.api.publish.maven.MavenPublication +import 
org.gradle.api.publish.maven.plugins.MavenPublishPlugin +import org.gradle.api.tasks.SourceSetContainer +import org.gradle.api.tasks.bundling.Compression +import org.gradle.api.tasks.bundling.Jar +import org.gradle.api.tasks.bundling.Tar +import org.gradle.kotlin.dsl.* +import org.gradle.plugins.signing.Sign +import org.gradle.plugins.signing.SigningExtension +import org.gradle.plugins.signing.SigningPlugin +import org.jetbrains.dokka.gradle.DokkaPlugin + +fun Project.radarPublishing(configure: RadarPublishingExtension.() -> Unit) { + configure(configure) +} + +interface RadarPublishingExtension { + val githubUrl: Property + val developers: Property Unit> + + fun developers(configure: MavenPomDeveloperSpec.() -> Unit) { + developers.set(configure) + } +} + +class RadarPublishingPlugin : Plugin { + override fun apply(project: Project): Unit = with(project) { + val extension = extensions.create("radarPublishing") + + val sourcesJar by tasks.registering(Jar::class) { + from(project.the()["main"].allSource) + archiveClassifier.set("sources") + duplicatesStrategy = DuplicatesStrategy.EXCLUDE + val classes by tasks + dependsOn(classes) + } + + apply() + + val dokkaJar by tasks.registering(Jar::class) { + from("$buildDir/dokka/javadoc") + archiveClassifier.set("javadoc") + val dokkaJavadoc by tasks + dependsOn(dokkaJavadoc) + } + + tasks.withType { + compression = Compression.GZIP + archiveExtension.set("tar.gz") + } + + tasks.withType { + manifest { + attributes( + "Implementation-Title" to project.name, + "Implementation-Version" to project.version + ) + } + } + + apply() + + val assemble by tasks + assemble.dependsOn(sourcesJar) + assemble.dependsOn(dokkaJar) + + val publishingExtension = extensions.getByName("publishing") + val mavenJar by publishingExtension.publications.creating(MavenPublication::class) { + from(components["java"]) + + artifact(sourcesJar) + artifact(dokkaJar) + + afterEvaluate { + val githubUrl = requireNotNull(extension.githubUrl.orNull) { 
"Missing githubUrl value in radarPublishing" } + pom { + name.set(project.name) + description.set(project.description) + url.set(githubUrl) + licenses { + license { + name.set("The Apache Software License, Version 2.0") + url.set("https://www.apache.org/licenses/LICENSE-2.0.txt") + distribution.set("repo") + } + } + if (extension.developers.isPresent) { + developers { + val developerBlock = extension.developers.get() + developerBlock() + } + } + issueManagement { + system.set("GitHub") + url.set("$githubUrl/issues") + } + organization { + name.set("RADAR-base") + url.set("https://radar-base.org") + } + scm { + connection.set("scm:git:$githubUrl") + url.set(githubUrl) + } + } + } + } + + apply() + + extensions.configure("signing") { + useGpgCmd() + isRequired = true + sign(tasks["sourcesJar"], tasks["dokkaJar"]) + sign(mavenJar) + } + + tasks.withType { + onlyIf { gradle.taskGraph.hasTask(tasks["publish"]) } + } + } +} diff --git a/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarRootProjectPlugin.kt b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarRootProjectPlugin.kt new file mode 100644 index 00000000..e83edc8c --- /dev/null +++ b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/RadarRootProjectPlugin.kt @@ -0,0 +1,65 @@ +package org.radarbase.gradle.plugin + +import io.github.gradlenexus.publishplugin.NexusPublishExtension +import io.github.gradlenexus.publishplugin.NexusPublishPlugin +import org.gradle.api.Plugin +import org.gradle.api.Project +import org.gradle.api.provider.Property +import org.gradle.api.tasks.wrapper.Wrapper +import org.gradle.kotlin.dsl.apply +import org.gradle.kotlin.dsl.configure +import org.gradle.kotlin.dsl.create +import org.gradle.kotlin.dsl.named + +fun Project.radarRootProject(configure: RadarRootProjectExtension.() -> Unit) { + configure(configure) +} + +interface RadarRootProjectExtension { + val group: Property + val projectVersion: Property + val gradleVersion: Property 
+} + +class RadarRootProjectPlugin : Plugin { + override fun apply(project: Project) = with(project) { + val extension = extensions.create("radarRootProject").apply { + group.convention("org.radarbase") + gradleVersion.convention(Versions.wrapper) + } + + allprojects { + afterEvaluate { + version = extension.projectVersion.get() + group = extension.group.get() + } + } + + afterEvaluate { + tasks.named("wrapper") { + gradleVersion = extension.gradleVersion.get() + } + } + + apply() + + project.extensions.configure { + repositories { + sonatype { + username.set(propertyOrEnv("ossrh.user", "OSSRH_USER")) + password.set(propertyOrEnv("ossrh.password", "OSSRH_PASSWORD")) + } + } + } + } + + companion object { + private fun Project.propertyOrEnv(propertyName: String, envName: String): String? { + return if (hasProperty(propertyName)) { + property(propertyName)?.toString() + } else { + System.getenv(envName) + } + } + } +} diff --git a/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/Versions.kt b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/Versions.kt new file mode 100644 index 00000000..f21d4a8d --- /dev/null +++ b/radar-commons-gradle/src/main/kotlin/org/radarbase/gradle/plugin/Versions.kt @@ -0,0 +1,9 @@ +package org.radarbase.gradle.plugin + +object Versions { + const val wrapper = "8.1.1" + const val kotlin = "1.8.21" + const val ktlint = "0.48.2" + const val java = 11 + const val junit = "5.9.3" +} diff --git a/radar-commons-kotlin/.gitignore b/radar-commons-kotlin/.gitignore new file mode 100644 index 00000000..3c0160d0 --- /dev/null +++ b/radar-commons-kotlin/.gitignore @@ -0,0 +1,2 @@ +build/ +out/ diff --git a/radar-commons-kotlin/build.gradle.kts b/radar-commons-kotlin/build.gradle.kts new file mode 100644 index 00000000..33cced0f --- /dev/null +++ b/radar-commons-kotlin/build.gradle.kts @@ -0,0 +1,17 @@ +plugins { + kotlin("plugin.serialization") +} + +description = "Library for Kotlin utility classes and functions" + 
+dependencies { + api(platform("org.jetbrains.kotlinx:kotlinx-coroutines-bom:${Versions.coroutines}")) + api("org.jetbrains.kotlinx:kotlinx-coroutines-core") + + api(platform("io.ktor:ktor-bom:${Versions.ktor}")) + api("io.ktor:ktor-client-auth") + implementation("io.ktor:ktor-client-content-negotiation") + implementation("io.ktor:ktor-serialization-kotlinx-json") + + testImplementation("org.hamcrest:hamcrest:2.2") +} diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CacheConfig.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CacheConfig.kt new file mode 100644 index 00000000..59a6aa3b --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CacheConfig.kt @@ -0,0 +1,26 @@ +package org.radarbase.kotlin.coroutines + +import kotlin.time.Duration +import kotlin.time.Duration.Companion.minutes +import kotlin.time.Duration.Companion.seconds + +data class CacheConfig( + /** Duration after which the cache is considered stale and should be refreshed. */ + val refreshDuration: Duration = 30.minutes, + /** Duration after which the cache may be refreshed if the cache does not fulfill a certain + * requirement. This should be shorter than [refreshDuration] to have effect. */ + val retryDuration: Duration = 1.minutes, + /** Time until the result may be recomputed when an exception is set for the cache. */ + val exceptionCacheDuration: Duration = 10.seconds, + /** + * Number of simultaneous computations that may occur. Increase if the time to computation + * is very variable. 
+ */ + val maxSimultaneousCompute: Int = 1, +) { + init { + require(retryDuration > Duration.ZERO) { "Cache fetch duration $retryDuration must be positive" } + require(refreshDuration >= retryDuration) { "Cache maximum age $refreshDuration must be at least fetch timeout $retryDuration" } + require(maxSimultaneousCompute > 0) { "At least one context must be able to compute the result" } + } +} diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CachedMap.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CachedMap.kt new file mode 100644 index 00000000..54f1f209 --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CachedMap.kt @@ -0,0 +1,67 @@ +/* + * Copyright 2020 The Hyve + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.radarbase.kotlin.coroutines + +/** Set of data that is cached for a duration of time. */ +class CachedMap( + cacheConfig: CacheConfig = CacheConfig(), + supplier: suspend () -> Map, +) : CachedValue>(cacheConfig, supplier) { + /** Whether the cache contains [key]. If it does not contain the value and [CacheConfig.retryDuration] + * has passed since the last try, it will update the cache and try once more. */ + suspend fun contains(key: K): Boolean = test { key in it } + + /** + * Find a pair matching [predicate]. 
+ * If it does not contain the value and [CacheConfig.retryDuration] + * has passed since the last try, it will update the cache and try once more. + * @return value if found and null otherwise + */ + suspend fun find(predicate: (K, V) -> Boolean): Pair? = query( + { map -> + map.entries + .find { (k, v) -> predicate(k, v) } + ?.toPair() + }, + { it != null }, + ).value + + /** + * Find a pair matching [predicate]. + * If it does not contain the value and [CacheConfig.retryDuration] + * has passed since the last try, it will update the cache and try once more. + * @return value if found and null otherwise + */ + suspend fun findValue(predicate: (V) -> Boolean): V? = query( + { map -> map.values.find { predicate(it) } }, + { it != null }, + ).value + + /** + * Get the value. + * If the cache is empty and [CacheConfig.retryDuration] + * has passed since the last try, it will update the cache and try once more. + */ + override suspend fun get(): Map = get { it.isNotEmpty() }.value + + /** + * Get the value. + * If the cache is empty and [CacheConfig.retryDuration] + * has passed since the last try, it will update the cache and try once more. + */ + suspend fun get(key: K): V? = query({ it[key] }, { it != null }).value +} diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CachedSet.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CachedSet.kt new file mode 100644 index 00000000..4b79c495 --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CachedSet.kt @@ -0,0 +1,46 @@ +/* + * Copyright 2020 The Hyve + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.radarbase.kotlin.coroutines + +/** + * Set of data that is cached for a duration of time. + * + * @param supplier How to update the cache. + */ +class CachedSet( + cacheConfig: CacheConfig = CacheConfig(), + supplier: suspend () -> Set, +) : CachedValue>(cacheConfig, supplier) { + /** Whether the cache contains [value]. If it does not contain the value and [CacheConfig.retryDuration] + * has passed since the last try, it will update the cache and try once more. */ + suspend fun contains(value: T): Boolean = test { value in it } + + /** + * Find a value matching [predicate]. + * If it does not contain the value and [CacheConfig.retryDuration] + * has passed since the last try, it will update the cache and try once more. + * @return value if found and null otherwise + */ + suspend fun find(predicate: (T) -> Boolean): T? = query({ it.find(predicate) }, { it != null }).value + + /** + * Get the value. + * If the cache is empty and [CacheConfig.retryDuration] + * has passed since the last try, it will update the cache and try once more. 
+ */ + override suspend fun get(): Set = get { it.isNotEmpty() }.value +} diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CachedValue.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CachedValue.kt new file mode 100644 index 00000000..109068aa --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/CachedValue.kt @@ -0,0 +1,270 @@ +package org.radarbase.kotlin.coroutines + +import kotlinx.coroutines.CancellationException +import kotlinx.coroutines.CompletableDeferred +import kotlinx.coroutines.isActive +import kotlinx.coroutines.sync.Semaphore +import java.util.concurrent.atomic.AtomicReference +import kotlin.coroutines.coroutineContext +import kotlin.time.Duration +import kotlin.time.ExperimentalTime +import kotlin.time.TimeMark +import kotlin.time.TimeSource + +internal typealias DeferredCache = CompletableDeferred> + +/** + * Caches a value with full support for coroutines. The value that will be cached is computed by + * [supplier]. + * Only one coroutine context will compute the value at a time, other coroutine contexts will wait + * for it to finish. + */ +open class CachedValue( + val config: CacheConfig, + private val supplier: suspend () -> T, +) { + private val cache = AtomicReference>>() + private val semaphore: Semaphore? = if (config.maxSimultaneousCompute > 1) { + Semaphore(config.maxSimultaneousCompute - 1) + } else { + null + } + + /** + * Query the cached value by running [transform] and return its result if valid. If + * [evaluateValid] returns false on the result, the cache computation is reevaluated if + * [CacheConfig.retryDuration] has been reached. 
+ */ + suspend fun query( + transform: suspend (T) -> R, + evaluateValid: (R) -> Boolean = { true }, + ): CacheResult { + val deferredResult = raceForDeferred() + val deferred = deferredResult.value + + return if (deferredResult is CacheMiss) { + val result = deferred.computeAndCache() + CacheMiss(transform(result)) + } else { + val concurrentResult = deferred.concurrentComputeAndCache() + if (concurrentResult != null) { + CacheMiss(transform(concurrentResult)) + } else { + deferred.awaitCache(transform, evaluateValid) + } + } + } + + /** + * Whether the contained value is stale. + * If [duration] is provided, it is considered stale only if the value is older than [duration]. + * If no value is cache, it is not considered stale. + */ + suspend fun isStale(duration: Duration? = null): Boolean { + val result = getFromCache() + return when { + result == null -> false + duration != null -> result.isExpired(duration) + else -> result.isExpired() + } + } + + /** + * Get the current contents from cache. This will not cause a computation. + * @return contents if it is present in cache, null otherwise. + */ + suspend fun getFromCache(): CacheContents? { + val currentDeferred = cache.get() + if (currentDeferred == null || !currentDeferred.isCompleted) { + return null + } + return currentDeferred.await() + } + + /** + * Set cached value. + */ + suspend fun set(value: T) { + while (coroutineContext.isActive) { + val deferred = raceForDeferred().value + + val newValue = CacheValue(value) + deferred.complete(newValue) + if (deferred.await() == newValue) { + return + } else { + cache.compareAndSet(deferred, null) + } + } + } + + /** + * Get cached value. If the cache is expired, fetch it again. The first coroutine context + * that reaches this method will call [computeAndCache], others coroutine contexts will use the + * value computed by the first. The result is not computed more + * often than [CacheConfig.retryDuration]. 
If the result was an exception, the exception is + * rethrown from cache. It is recomputed if the [CacheConfig.exceptionCacheDuration] has passed. + */ + open suspend fun get(): T = query({ it }) { false }.value + + /** + * Get cached value. If the cache is expired, fetch it again. The first coroutine context + * that reaches this method will call [computeAndCache], others coroutine contexts will use the + * value computed by the first. If the value was retrieved from cache and [evaluateValid] + * returns false for that value, the result is recomputed. The result is not computed more + * often than [CacheConfig.retryDuration]. If the result was an exception, the exception is + * rethrown from cache. It is recomputed if the [CacheConfig.exceptionCacheDuration] has passed. + */ + suspend inline fun get(noinline evaluateValid: (T) -> Boolean): CacheResult = query({ it }, evaluateValid) + + /** + * Test the cached value by running [predicate] and return its result if true. If + * [predicate] returns false on the result, the cache computation is reevaluated if + * [CacheConfig.retryDuration] has been reached. + */ + suspend inline fun test(noinline predicate: (T) -> Boolean): Boolean { + return query(predicate) { it }.value + } + + private suspend fun DeferredCache.computeAndCache(): T { + val result = try { + val value = supplier() + complete(CacheValue(value)) + value + } catch (ex: Throwable) { + complete(CacheError(ex)) + throw ex + } + return result + } + + private suspend fun DeferredCache.concurrentComputeAndCache(): T? 
{ + if (isCompleted) return null + + return semaphore?.tryWithPermitOrNull { + if (isCompleted) { + null + } else { + computeAndCache() + } + } + } + + private suspend fun DeferredCache.awaitCache( + transform: suspend (T) -> R, + evaluateValid: (R) -> Boolean, + ): CacheResult { + val result = await().map(transform) + return if (result.isExpired(evaluateValid)) { + // Either no new coroutine context had updated the cache value, then update it to + // null. Otherwise, another suspend context is active and get() will await the + // result from that context + cache.compareAndSet(this, null) + query(transform) { false } + } else { + val value = result.getOrThrow() + CacheHit(value) + } + } + + /** + * Race for the first suspend context to create a CompletableDeferred object. All other contexts + * will use that context to read their values. + * + * @return a pair of a CompletableDeferred value and a boolean, if true this context is the + * winner, if false this should use the deferred to read its value. + */ + private fun raceForDeferred(): CacheResult> { + var result: CacheResult> + + do { + val previousDeferred = cache.get() + result = if (previousDeferred == null) { + CacheMiss(CompletableDeferred()) + } else { + CacheHit(previousDeferred) + } + } while (!cache.compareAndSet(previousDeferred, result.value)) + + return result + } + + inline fun CacheContents.isExpired( + evaluateValid: (R) -> Boolean = { true }, + ): Boolean = if (this is CacheError) { + isExpired(config.exceptionCacheDuration) + } else { + this as CacheValue + isExpired(config.refreshDuration) || + (!evaluateValid(value) && isExpired(config.retryDuration)) + } + + /** + * Remove value from cache. Note that this does not cancel existing computations for the + * value, but the computed value will then not be stored. + */ + fun clear() { + cache.set(null) + } + + sealed class CacheContents + @ExperimentalTime + constructor(time: TimeMark?) 
{ + + @OptIn(ExperimentalTime::class) + constructor() : this(null) + + @ExperimentalTime + protected val time: TimeMark = time ?: TimeSource.Monotonic.markNow() + + @OptIn(ExperimentalTime::class) + open fun isExpired(age: Duration): Boolean = (time + age).hasPassedNow() + + abstract fun getOrThrow(): T + + abstract suspend fun map(transform: suspend (T) -> R): CacheContents + } + + class CacheError + internal constructor( + val exception: Throwable, + ) : CacheContents() { + override fun isExpired(age: Duration): Boolean = exception is CancellationException || super.isExpired(age) + override fun getOrThrow(): T = throw exception + + @Suppress("UNCHECKED_CAST") + override suspend fun map(transform: suspend (T) -> R): CacheContents = this as CacheError + } + + @OptIn(ExperimentalTime::class) + class CacheValue + @ExperimentalTime + internal constructor( + val value: T, + time: TimeMark?, + ) : CacheContents(time) { + + @OptIn(ExperimentalTime::class) + constructor(value: T) : this(value, null) + + override fun getOrThrow(): T = value + + @OptIn(ExperimentalTime::class) + override suspend fun map(transform: suspend (T) -> R): CacheContents = try { + CacheValue(transform(value), time = time) + } catch (ex: Throwable) { + CacheError(ex) + } + } + + /** Result from cache of type [T]. */ + sealed interface CacheResult { + val value: T + } + + /** Cache hit, meaning the value was computed by another coroutine. */ + data class CacheHit(override val value: T) : CacheResult + + /** Cache miss, meaning the value was computed by the current coroutine. 
*/ + data class CacheMiss(override val value: T) : CacheResult +} diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/Extensions.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/Extensions.kt new file mode 100644 index 00000000..33d6fc7e --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/kotlin/coroutines/Extensions.kt @@ -0,0 +1,136 @@ +@file:Suppress("unused") + +package org.radarbase.kotlin.coroutines + +import kotlinx.coroutines.* +import kotlinx.coroutines.channels.Channel +import kotlinx.coroutines.channels.consume +import kotlinx.coroutines.sync.Semaphore +import java.util.concurrent.Future +import java.util.concurrent.TimeUnit +import kotlin.coroutines.CoroutineContext +import kotlin.coroutines.EmptyCoroutineContext +import kotlin.time.Duration + +/** + * Try to acquire a semaphore permit, and run [block] if successful. + * If this cannot be achieved without blocking, return null. + * @return result of [block] or null if no permit could be acquired. + */ +suspend fun Semaphore.tryWithPermitOrNull(block: suspend () -> T): T? { + if (!tryAcquire()) return null + return try { + block() + } finally { + release() + } +} + +/** + * Get a future value via coroutine suspension. + * The future is evaluated in context [Dispatchers.IO]. + */ +suspend fun Future.suspendGet( + duration: Duration? = null, +): T = coroutineScope { + val channel = Channel() + launch { + try { + channel.receive() + } catch (ex: CancellationException) { + cancel(true) + } + } + try { + withContext(Dispatchers.IO) { + if (duration != null) { + get(duration.inWholeMilliseconds, TimeUnit.MILLISECONDS) + } else { + get() + } + } + } catch (ex: InterruptedException) { + throw CancellationException("Future was interrupted", ex) + } finally { + channel.send(Unit) + } +} + +/** + * Transform each value in the iterable in a separate coroutine and await termination. 
+ */ +suspend inline fun Iterable.forkJoin( + coroutineContext: CoroutineContext = Dispatchers.Default, + crossinline transform: suspend CoroutineScope.(T) -> R, +): List = coroutineScope { + map { t -> async(coroutineContext) { transform(t) } } + .awaitAll() +} + +/** + * Consume the first value produced by the producer on its provided channel. Once a value is sent + * by the producer, its coroutine is cancelled. + * @throws kotlinx.coroutines.channels.ClosedReceiveChannelException if the producer does not + * produce any values. + */ +suspend inline fun consumeFirst( + coroutineContext: CoroutineContext = Dispatchers.Default, + crossinline producer: suspend CoroutineScope.(emit: suspend (T) -> Unit) -> Unit, +): T = coroutineScope { + val channel = Channel() + + val producerJob = launch(coroutineContext) { + try { + producer(channel::send) + } finally { + channel.close() + } + } + + val result = channel.consume { receive() } + producerJob.cancel() + result +} + +/** + * Transforms each value with [transform] and returns the first value where [predicate] returns + * true. Each value is transformed and evaluated in its own async context. If no transformed value + * satisfies predicate, null is returned. + */ +suspend fun Iterable.forkFirstOfOrNull( + coroutineContext: CoroutineContext = EmptyCoroutineContext, + transform: suspend CoroutineScope.(T) -> R, + predicate: suspend CoroutineScope.(R) -> Boolean, +): R? = consumeFirst(coroutineContext) { emit -> + forkJoin(coroutineContext) { t -> + val result = transform(t) + if (predicate(result)) { + emit(result) + } + } + emit(null) +} + +suspend fun Iterable.forkFirstOfNotNullOrNull( + coroutineContext: CoroutineContext = EmptyCoroutineContext, + transform: suspend CoroutineScope.(T) -> R?, +): R? = forkFirstOfOrNull(coroutineContext, transform) { it != null } + +/** + * Returns true as soon as [predicate] returns true on a value, or false if [predicate] does + * not return true on any of the values. 
All values are evaluated in a separate async context using + * [forkJoin]. + */ +suspend fun Iterable.forkAny( + coroutineContext: CoroutineContext = EmptyCoroutineContext, + predicate: suspend CoroutineScope.(T) -> Boolean, +): Boolean = forkFirstOfOrNull(coroutineContext, predicate) { it } ?: false + +operator fun Set.plus(elements: Set): Set = when { + isEmpty() -> elements + elements.isEmpty() -> this + else -> buildSet(size + elements.size) { + addAll(this) + addAll(elements) + } +} diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/AuthTokenHolder.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/AuthTokenHolder.kt new file mode 100644 index 00000000..ac1abfa7 --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/AuthTokenHolder.kt @@ -0,0 +1,51 @@ +package org.radarbase.ktor.auth + +import kotlinx.coroutines.CompletableDeferred +import java.util.concurrent.atomic.AtomicReference + +internal class AuthTokenHolder( + private val loadTokens: suspend () -> T?, +) { + private val refreshTokensDeferred = AtomicReference?>(null) + private val loadTokensDeferred = AtomicReference?>(null) + + internal fun clearToken() { + loadTokensDeferred.set(null) + refreshTokensDeferred.set(null) + } + + internal suspend fun loadToken(): T? { + var deferred: CompletableDeferred? + do { + deferred = loadTokensDeferred.get() + val newValue = deferred ?: CompletableDeferred() + } while (!loadTokensDeferred.compareAndSet(deferred, newValue)) + + return if (deferred != null) { + deferred.await() + } else { + val newTokens = loadTokens() + loadTokensDeferred.get()!!.complete(newTokens) + newTokens + } + } + + internal suspend fun setToken(block: suspend () -> T?): T? { + var deferred: CompletableDeferred? 
+ do { + deferred = refreshTokensDeferred.get() + val newValue = deferred ?: CompletableDeferred() + } while (!refreshTokensDeferred.compareAndSet(deferred, newValue)) + + val newToken = if (deferred == null) { + val newTokens = block() + refreshTokensDeferred.get()!!.complete(newTokens) + refreshTokensDeferred.set(null) + newTokens + } else { + deferred.await() + } + loadTokensDeferred.set(CompletableDeferred(newToken)) + return newToken + } +} diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/ClientCredentialsConfig.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/ClientCredentialsConfig.kt new file mode 100644 index 00000000..3584c895 --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/ClientCredentialsConfig.kt @@ -0,0 +1,24 @@ +package org.radarbase.ktor.auth + +data class ClientCredentialsConfig( + val tokenUrl: String, + val clientId: String? = null, + val clientSecret: String? = null, +) { + /** + * Fill in the client ID and client secret from environment variables. The variables are + * `<prefix>_CLIENT_ID` and `<prefix>_CLIENT_SECRET`. 
+ */ + fun copyWithEnv(prefix: String = "MANAGEMENT_PORTAL"): ClientCredentialsConfig { + var result = this + val envClientId = System.getenv("${prefix}_CLIENT_ID") + if (envClientId != null) { + result = result.copy(clientId = envClientId) + } + val envClientSecret = System.getenv("${prefix}_CLIENT_SECRET") + if (envClientSecret != null) { + result = result.copy(clientSecret = envClientSecret) + } + return result + } +} diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/Extensions.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/Extensions.kt new file mode 100644 index 00000000..c48bab65 --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/Extensions.kt @@ -0,0 +1,8 @@ +package org.radarbase.ktor.auth + +import io.ktor.client.request.* +import io.ktor.http.* + +fun HttpRequestBuilder.bearer(token: String) { + headers[HttpHeaders.Authorization] = "Bearer $token" +} diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/OAuth2AccessToken.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/OAuth2AccessToken.kt new file mode 100644 index 00000000..c8137818 --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/OAuth2AccessToken.kt @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2021. The Hyve + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * + * See the file LICENSE in the root of this repository. + */ + +package org.radarbase.ktor.auth + +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable + +@Serializable +data class OAuth2AccessToken( + @SerialName("access_token") val accessToken: String? = null, + @SerialName("refresh_token") val refreshToken: String? = null, + @SerialName("expires_in") val expiresIn: Long = 0, + @SerialName("token_type") val tokenType: String? = null, + @SerialName("user_id") val externalUserId: String? 
= null, + @SerialName("scope") val scope: String? = null, +) diff --git a/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/OAuthClientProvider.kt b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/OAuthClientProvider.kt new file mode 100644 index 00000000..26df4678 --- /dev/null +++ b/radar-commons-kotlin/src/main/kotlin/org/radarbase/ktor/auth/OAuthClientProvider.kt @@ -0,0 +1,177 @@ +package org.radarbase.ktor.auth + +import io.ktor.client.* +import io.ktor.client.call.* +import io.ktor.client.plugins.auth.* +import io.ktor.client.plugins.auth.providers.* +import io.ktor.client.plugins.contentnegotiation.* +import io.ktor.client.request.* +import io.ktor.client.request.forms.* +import io.ktor.client.statement.* +import io.ktor.http.* +import io.ktor.http.auth.* +import io.ktor.serialization.kotlinx.json.* +import io.ktor.util.* +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.flow.MutableStateFlow +import org.slf4j.LoggerFactory + +private val logger = LoggerFactory.getLogger(Auth::class.java) + +/** + * Installs the client's [BearerAuthProvider]. + */ +fun Auth.clientCredentials(block: ClientCredentialsAuthConfig.() -> Unit) { + with(ClientCredentialsAuthConfig().apply(block)) { + this@clientCredentials.providers.add(ClientCredentialsAuthProvider(_requestToken, _loadTokens, _sendWithoutRequest, realm)) + } +} + +fun Auth.clientCredentials( + authConfig: ClientCredentialsConfig, + targetHost: String? 
= null, +): Flow { + requireNotNull(authConfig.clientId) { "Missing client ID" } + requireNotNull(authConfig.clientSecret) { "Missing client secret" } + val flow = MutableStateFlow(null) + + clientCredentials { + if (targetHost != null) { + sendWithoutRequest { request -> + request.url.host == targetHost + } + } + requestToken { + val response = client.submitForm( + url = authConfig.tokenUrl, + formParameters = Parameters.build { + append("grant_type", "client_credentials") + append("client_id", authConfig.clientId) + append("client_secret", authConfig.clientSecret) + }, + ) { + accept(ContentType.Application.Json) + markAsRequestTokenRequest() + } + val refreshTokenInfo: OAuth2AccessToken? = if (!response.status.isSuccess()) { + logger.error("Failed to fetch new token: {}", response.bodyAsText()) + null + } else { + response.body() + } + flow.value = refreshTokenInfo + refreshTokenInfo + } + } + + return flow +} + +/** + * Parameters to be passed to [BearerAuthConfig.refreshTokens] lambda. + */ +class RequestTokenParams( + val client: HttpClient, +) { + /** + * Marks that this request is for requesting auth tokens, resulting in a special handling of it. + */ + fun HttpRequestBuilder.markAsRequestTokenRequest() { + attributes.put(Auth.AuthCircuitBreaker, Unit) + } +} + +/** + * A configuration for [BearerAuthProvider]. + */ +@KtorDsl +class ClientCredentialsAuthConfig { + internal var _requestToken: suspend RequestTokenParams.() -> OAuth2AccessToken? = { null } + internal var _loadTokens: suspend () -> OAuth2AccessToken? = { null } + internal var _sendWithoutRequest: (HttpRequestBuilder) -> Boolean = { true } + + var realm: String? = null + + /** + * Configures a callback that refreshes a token when the 401 status code is received. + */ + fun requestToken(block: suspend RequestTokenParams.() -> OAuth2AccessToken?) { + _requestToken = block + } + + /** + * Configures a callback that loads a cached token from a local storage. 
+ * Note: Using the same client instance here to make a request will result in a deadlock. + */ + fun loadTokens(block: suspend () -> OAuth2AccessToken?) { + _loadTokens = block + } + + /** + * Sends credentials without waiting for [HttpStatusCode.Unauthorized]. + */ + fun sendWithoutRequest(block: (HttpRequestBuilder) -> Boolean) { + _sendWithoutRequest = block + } +} + +/** + * An authentication provider for the Bearer HTTP authentication scheme. + * Bearer authentication involves security tokens called bearer tokens. + * As an example, these tokens can be used as a part of OAuth flow to authorize users of your application + * by using external providers, such as Google, Facebook, Twitter, and so on. + * + * You can learn more from [Bearer authentication](https://ktor.io/docs/bearer-client.html). + */ +class ClientCredentialsAuthProvider( + private val requestToken: suspend RequestTokenParams.() -> OAuth2AccessToken?, + loadTokens: suspend () -> OAuth2AccessToken?, + private val sendWithoutRequestCallback: (HttpRequestBuilder) -> Boolean = { true }, + private val realm: String?, +) : AuthProvider { + + @Suppress("OverridingDeprecatedMember") + @Deprecated("Please use sendWithoutRequest function instead", replaceWith = ReplaceWith("sendWithoutRequest(request)")) + override val sendWithoutRequest: Boolean + get() = error("Deprecated") + + private val tokensHolder = AuthTokenHolder(loadTokens) + + override fun sendWithoutRequest(request: HttpRequestBuilder): Boolean = sendWithoutRequestCallback(request) + + /** + * Checks if current provider is applicable to the request. + */ + override fun isApplicable(auth: HttpAuthHeader): Boolean { + if (auth.authScheme != AuthScheme.Bearer) return false + if (realm == null) return true + if (auth !is HttpAuthHeader.Parameterized) return false + + return auth.parameter("realm") == realm + } + + /** + * Adds an authentication method headers and credentials. 
+ */ + override suspend fun addRequestHeaders(request: HttpRequestBuilder, authHeader: HttpAuthHeader?) { + val token = tokensHolder.loadToken() ?: return + + request.headers { + if (contains(HttpHeaders.Authorization)) { + remove(HttpHeaders.Authorization) + } + append(HttpHeaders.Authorization, "Bearer ${token.accessToken}") + } + } + + override suspend fun refreshToken(response: HttpResponse): Boolean { + val newToken = tokensHolder.setToken { + requestToken(RequestTokenParams(response.call.client)) + } + return newToken != null + } + + fun clearToken() { + tokensHolder.clearToken() + } +} diff --git a/radar-commons-kotlin/src/test/kotlin/org/radarbase/kotlin/coroutines/CachedValueTest.kt b/radar-commons-kotlin/src/test/kotlin/org/radarbase/kotlin/coroutines/CachedValueTest.kt new file mode 100644 index 00000000..bbb90c14 --- /dev/null +++ b/radar-commons-kotlin/src/test/kotlin/org/radarbase/kotlin/coroutines/CachedValueTest.kt @@ -0,0 +1,172 @@ +package org.radarbase.kotlin.coroutines + +import kotlinx.coroutines.DelicateCoroutinesApi +import kotlinx.coroutines.GlobalScope +import kotlinx.coroutines.delay +import kotlinx.coroutines.runBlocking +import org.hamcrest.MatcherAssert.assertThat +import org.hamcrest.Matchers.* +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import java.util.concurrent.atomic.AtomicInteger +import kotlin.time.Duration.Companion.milliseconds + +@OptIn(DelicateCoroutinesApi::class) +internal class CachedValueTest { + private lateinit var config: CacheConfig + + private val calls: AtomicInteger = AtomicInteger(0) + + @BeforeEach + fun setUp() { + calls.set(0) + config = CacheConfig( + refreshDuration = 40.milliseconds, + retryDuration = 20.milliseconds, + exceptionCacheDuration = 20.milliseconds, + ) + } + + @Test + fun get() { + val cache = CachedValue(config) { calls.incrementAndGet() } + runBlocking(GlobalScope.coroutineContext) { + assertThat("Initial value should 
refresh", cache.get(), `is`(1)) + assertThat("No refresh within threshold", cache.get(), `is`(1)) + delay(20.milliseconds) + assertThat("Refresh after threshold", cache.get(), `is`(2)) + assertThat("No refresh after threshold", cache.get(), `is`(2)) + } + } + + @Test + fun getInvalid() { + val cache = CachedValue(config) { calls.incrementAndGet() } + runBlocking { + assertThat("Initial value should refresh", cache.get { it < 0 }, equalTo(CachedValue.CacheMiss(1))) + assertThat("No refresh within threshold", cache.get { it < 0 }, equalTo(CachedValue.CacheHit(1))) + delay(20.milliseconds) + assertThat("Refresh after threshold", cache.get { it < 0 }, equalTo(CachedValue.CacheMiss(2))) + assertThat("No refresh after threshold", cache.get { it < 0 }, equalTo(CachedValue.CacheHit(2))) + } + } + + @Test + fun getValid() { + val cache = CachedValue(config) { calls.incrementAndGet() } + runBlocking { + assertThat("Initial value should refresh", cache.get { it >= 0 }, equalTo(CachedValue.CacheMiss(1))) + assertThat("No refresh within threshold", cache.get { it >= 0 }, equalTo(CachedValue.CacheHit(1))) + delay(20.milliseconds) + assertThat("No refresh after valid value", cache.get { it >= 0 }, equalTo(CachedValue.CacheHit(1))) + } + } + + @Test + fun refresh() { + val cache = CachedValue(config) { calls.incrementAndGet() } + + runBlocking { + assertThat("Initial get calls supplier", cache.get(), `is`(1)) + assertThat("Next get uses cache", cache.get(), `is`(1)) + cache.clear() + assertThat("Next get uses cache", cache.get(), `is`(2)) + } + } + + @Test + fun query() { + val cache = CachedValue(config) { calls.incrementAndGet() } + + runBlocking { + assertThat("Initial value should refresh", cache.query({ it + 1 }, { it > 2 }), equalTo(CachedValue.CacheMiss(2))) + assertThat("No refresh within threshold", cache.query({ it + 1 }, { it > 2 }), equalTo(CachedValue.CacheHit(2))) + delay(20.milliseconds) + assertThat( + "Retry because predicate does not match", + cache.query({ it + 
1 }, { it > 2 }), + equalTo(CachedValue.CacheMiss(3)), + ) + assertThat("No refresh within threshold", cache.query({ it + 1 }, { it > 2 }), equalTo(CachedValue.CacheHit(3))) + delay(20.milliseconds) + assertThat( + "No retry because predicate matches", + cache.query({ it + 1 }, { it > 2 }), + equalTo(CachedValue.CacheHit(3)), + ) + delay(20.milliseconds) + assertThat( + "Refresh after refresh threshold since last retry", + cache.query({ it + 1 }, { it > 2 }), + equalTo(CachedValue.CacheMiss(4)), + ) + } + } + + @Test + fun getMultithreaded() { + val cache = CachedValue(config) { + calls.incrementAndGet() + delay(100.milliseconds) + calls.get() + } + + runBlocking { + (0..5) + .forkJoin { + cache.get() + } + .forEach { + assertThat("Get the same value in all contexts", it, `is`(1)) + } + } + + assertThat("No more calls are made", calls.get(), `is`(1)) + } + + @Test + fun getMulti2threaded() { + val cache = CachedValue( + config.copy( + maxSimultaneousCompute = 2, + ), + ) { + calls.incrementAndGet() + delay(100.milliseconds) + calls.get() + } + + runBlocking { + val values = (0..5) + .forkJoin { + cache.get() + } + + assertThat(values[0], lessThan(3)) + values.forEach { + assertThat("Get the same value in all contexts", it, `is`(values[0])) + } + } + + assertThat("Two threads should be computing the value", calls.get(), `is`(2)) + } + + @Test + fun throwTest() { + val cache = CachedValue(config.copy(refreshDuration = 40.milliseconds)) { + val newValue = calls.incrementAndGet() + if (newValue % 2 == 0) throw IllegalStateException() else newValue + } + + runBlocking { + assertThat(cache.get(), `is`(1)) + assertThat(cache.get(), `is`(1)) + delay(42.milliseconds) + assertThrows { cache.get() } + assertThrows { cache.get() } + delay(22.milliseconds) + assertThat(cache.get(), `is`(3)) + } + } +} diff --git a/radar-commons-kotlin/src/test/kotlin/org/radarbase/kotlin/coroutines/ExtensionsKtTest.kt 
b/radar-commons-kotlin/src/test/kotlin/org/radarbase/kotlin/coroutines/ExtensionsKtTest.kt new file mode 100644 index 00000000..04eb05d9 --- /dev/null +++ b/radar-commons-kotlin/src/test/kotlin/org/radarbase/kotlin/coroutines/ExtensionsKtTest.kt @@ -0,0 +1,87 @@ +package org.radarbase.kotlin.coroutines + +import kotlinx.coroutines.* +import org.hamcrest.MatcherAssert.assertThat +import org.hamcrest.Matchers.greaterThan +import org.hamcrest.Matchers.lessThan +import org.junit.jupiter.api.Assertions.* +import org.junit.jupiter.api.BeforeAll +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.fail +import kotlin.time.Duration +import kotlin.time.Duration.Companion.milliseconds +import kotlin.time.ExperimentalTime +import kotlin.time.measureTime + +@OptIn(ExperimentalTime::class) +class ExtensionsKtTest { + companion object { + @BeforeAll + @JvmStatic + fun setUpClass() { + runBlocking { + println("warmed up coroutines") + } + } + } + + @Test + fun testConsumeFirst() = runBlocking { + val inBlockingTime = measureTime { + val first = consumeFirst { emit -> + listOf( + async(Dispatchers.Default) { + delay(200.milliseconds) + emit("a") + fail("Should be cancelled") + }, + async(Dispatchers.Default) { + delay(50.milliseconds) + emit("b") + }, + ).awaitAll() + } + assertEquals("b", first) + } + assertThat(inBlockingTime, greaterThan(50.milliseconds)) + assertThat(inBlockingTime, lessThan(200.milliseconds)) + } + + @Test + fun testForkJoin() = runBlocking { + val inBlockingTime = measureTime { + val result = listOf(100.milliseconds, 50.milliseconds) + .forkJoin { + delay(it) + it + } + assertEquals(listOf(100.milliseconds, 50.milliseconds), result) + } + assertThat(inBlockingTime, greaterThan(100.milliseconds)) + } + + @Test + fun testForkJoinFirst() = runBlocking { + val inBlockingTime = measureTime { + val result: Duration? 
= consumeFirst { emit -> + listOf(200.milliseconds, 50.milliseconds) + .forkJoin { + delay(it) + emit(it) + } + emit(null) + } + assertEquals(50.milliseconds, result) + } + assertThat(inBlockingTime, lessThan(200.milliseconds)) + assertThat(inBlockingTime, greaterThan(50.milliseconds)) + } + + @Test + fun testConcurrentAny() { + runBlocking { + assertTrue(listOf(1, 2, 3, 4).forkAny { it > 3 }) + assertFalse(listOf(1, 2, 3, 4).forkAny { it < 1 }) + } + } +} diff --git a/radar-commons-server/build.gradle b/radar-commons-server/build.gradle deleted file mode 100644 index f1f85ade..00000000 --- a/radar-commons-server/build.gradle +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: 'com.github.davidmc24.gradle.plugin.avro' - -description = 'RADAR Common server library utilities.' 
- -dependencies { - api project(':radar-commons') - - // For POJO classes and ConfigLoader - implementation(platform("com.fasterxml.jackson:jackson-bom:$jacksonVersion")) - implementation group: 'com.fasterxml.jackson.core' , name: 'jackson-databind' - implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml' - - api group: 'org.apache.avro', name: 'avro', version: avroVersion - - implementation group: 'org.apache.kafka', name: 'kafka-clients', version: kafkaVersion - - testImplementation group: 'org.mockito', name: 'mockito-core', version: mockitoVersion - // Direct producer uses KafkaAvroSerializer if initialized - testImplementation group: 'io.confluent', name: 'kafka-avro-serializer', version: confluentVersion - testImplementation group: 'org.radarbase', name: 'radar-schemas-commons', version: radarSchemasVersion - // Direct producer uses KafkaAvroSerializer if initialized - testImplementation group: 'junit', name: 'junit', version: junitVersion - testRuntimeOnly group: 'org.slf4j', name: 'slf4j-simple', version: slf4jVersion -} - -apply from: '../gradle/publishing.gradle' -apply from: '../gradle/codestyle.gradle' - -tasks.withType(Checkstyle) { - exclude '**/org/radarbase/stream/collector/*State.*' -} - -tasks.withType(Pmd) { - exclude '**/org/radarbase/stream/collector/*State.*' -} diff --git a/radar-commons-server/build.gradle.kts b/radar-commons-server/build.gradle.kts new file mode 100644 index 00000000..f3522cda --- /dev/null +++ b/radar-commons-server/build.gradle.kts @@ -0,0 +1,55 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import org.jetbrains.dokka.gradle.DokkaTask +import org.jetbrains.kotlin.gradle.tasks.KotlinCompile + +plugins { + id("com.github.davidmc24.gradle.plugin.avro") +} + +description = "RADAR Common server library utilities." + +dependencies { + api(project(":radar-commons")) + + // For POJO classes and ConfigLoader + implementation(platform("com.fasterxml.jackson:jackson-bom:${Versions.jackson}")) + implementation("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml") + implementation("com.fasterxml.jackson.core:jackson-databind") + + api("org.apache.avro:avro:${Versions.avro}") + + implementation("org.apache.kafka:kafka-clients:${Versions.kafka}") + + testImplementation("org.mockito:mockito-core:${Versions.mockito}") + // Direct producer uses KafkaAvroSerializer if initialized + testImplementation("io.confluent:kafka-avro-serializer:${Versions.confluent}") + testImplementation("org.radarbase:radar-schemas-commons:${Versions.radarSchemas}") +} + +val generateAvroJava by tasks + +tasks.withType { + dependsOn(generateAvroJava) +} + +tasks.withType { + dependsOn(generateAvroJava) +} + +tasks.withType { + dependsOn(generateAvroJava) +} diff --git a/radar-commons-server/src/main/java/org/radarbase/config/AvroTopicConfig.java b/radar-commons-server/src/main/java/org/radarbase/config/AvroTopicConfig.java index 6e916f6d..1a14e1b4 100644 --- a/radar-commons-server/src/main/java/org/radarbase/config/AvroTopicConfig.java +++ b/radar-commons-server/src/main/java/org/radarbase/config/AvroTopicConfig.java @@ -40,7 +40,7 @@ public class AvroTopicConfig { 
*/ public AvroTopic parseAvroTopic() { try { - return AvroTopic.parse(topic, keySchema, valueSchema); + return AvroTopic.Companion.parse(topic, keySchema, valueSchema); } catch (IllegalArgumentException ex) { throw new IllegalStateException("Topic " + topic + " schema cannot be instantiated", ex); diff --git a/radar-commons-server/src/main/java/org/radarbase/producer/direct/DirectSender.java b/radar-commons-server/src/main/java/org/radarbase/producer/direct/DirectSender.java deleted file mode 100644 index 4617c574..00000000 --- a/radar-commons-server/src/main/java/org/radarbase/producer/direct/DirectSender.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.direct; - -import java.util.Properties; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.radarbase.data.RecordData; -import org.radarbase.producer.KafkaSender; -import org.radarbase.producer.KafkaTopicSender; -import org.radarbase.topic.AvroTopic; - -/** - * Directly sends a message to Kafka using a KafkaProducer. 
- */ -public class DirectSender implements KafkaSender { - private final KafkaProducer producer; - - public DirectSender(Properties properties) { - producer = new KafkaProducer(properties); - } - - @Override - public KafkaTopicSender sender(final AvroTopic topic) { - return new DirectTopicSender<>(topic); - } - - @Override - public boolean resetConnection() { - return true; - } - - @Override - public boolean isConnected() { - return true; - } - - @Override - public void close() { - producer.flush(); - producer.close(); - } - - @SuppressWarnings("unchecked") - private class DirectTopicSender implements KafkaTopicSender { - private final String name; - - private DirectTopicSender(AvroTopic topic) { - name = topic.getName(); - } - - @Override - public void send(K key, V value) { - producer.send(new ProducerRecord<>(name, key, value)); - producer.flush(); - } - - @Override - public void send(RecordData records) { - for (V record : records) { - producer.send(new ProducerRecord<>(name, records.getKey(), record)); - } - producer.flush(); - } - - @Override - public void clear() { - // noop - } - - @Override - public void flush() { - // noop - } - - @Override - public void close() { - // noop - } - } -} diff --git a/radar-commons-server/src/main/java/org/radarbase/stream/collector/NumericAggregateCollector.java b/radar-commons-server/src/main/java/org/radarbase/stream/collector/NumericAggregateCollector.java index 4077c9b8..70fff99f 100644 --- a/radar-commons-server/src/main/java/org/radarbase/stream/collector/NumericAggregateCollector.java +++ b/radar-commons-server/src/main/java/org/radarbase/stream/collector/NumericAggregateCollector.java @@ -16,8 +16,6 @@ package org.radarbase.stream.collector; -import static org.radarbase.util.Serialization.floatToDouble; - import java.math.BigDecimal; import java.math.BigInteger; import java.nio.ByteBuffer; @@ -124,7 +122,7 @@ public NumericAggregateCollector add(IndexedRecord record) { /** Add a single sample. 
*/ public NumericAggregateCollector add(float value) { - return this.add(floatToDouble(value)); + return this.add(Double.parseDouble(String.valueOf(value))); } /** diff --git a/radar-commons-server/src/main/java/org/radarbase/util/RollingTimeAverage.java b/radar-commons-server/src/main/java/org/radarbase/util/RollingTimeAverage.java deleted file mode 100644 index a4515c96..00000000 --- a/radar-commons-server/src/main/java/org/radarbase/util/RollingTimeAverage.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.util; - -import java.util.Deque; -import java.util.LinkedList; - -/** - * Get the average of a set of values collected in a sliding time window of fixed duration. At least - * one value is needed to get an average. - */ -public class RollingTimeAverage { - private final long window; - private TimeCount firstTime; - private double total; - private final Deque deque; - - /** - * A rolling time average with a sliding time window of fixed duration. - * @param timeWindowMillis duration of the time window. - */ - public RollingTimeAverage(long timeWindowMillis) { - this.window = timeWindowMillis; - this.total = 0d; - this.firstTime = null; - this.deque = new LinkedList<>(); - } - - /** Whether values have already been added. */ - public boolean hasAverage() { - return firstTime != null; - } - - /** Add a new value. 
*/ - public void add(double x) { - if (firstTime == null) { - firstTime = new TimeCount(x); - } else { - deque.addLast(new TimeCount(x)); - } - total += x; - } - - /** Add a value of one. */ - public void increment() { - add(1d); - } - - /** - * Get the average value per second over a sliding time window of fixed size. - * - *

It takes one value before the window started as a baseline, and adds all values in the - * window. It then divides by the total time window from the first value (outside/before the - * window) to the last value (at the end of the window). - * @return average value per second - */ - public double getAverage() { - if (!hasAverage()) { - throw new IllegalStateException("Cannot get average without values"); - } - - long now = System.currentTimeMillis(); - long currentWindowStart = now - window; - while (!this.deque.isEmpty() && this.deque.getFirst().time < currentWindowStart) { - total -= this.firstTime.value; - this.firstTime = this.deque.removeFirst(); - } - if (this.deque.isEmpty() || this.firstTime.time >= currentWindowStart) { - return 1000d * total / (now - this.firstTime.time); - } else { - long time = this.deque.getLast().time - currentWindowStart; - double removedRate = (currentWindowStart - this.firstTime.time) - / (this.deque.getFirst().time - firstTime.time); - double removedValue = this.firstTime.value + this.deque.getFirst().value * removedRate; - double value = (total - removedValue) / time; - return 1000d * value; - } - } - - /** - * Rounded {@link #getAverage()}. - */ - public int getCount() { - return (int)Math.round(getAverage()); - } - - private static class TimeCount { - private final double value; - private final long time; - - TimeCount(double value) { - this.value = value; - this.time = System.currentTimeMillis(); - } - } -} diff --git a/radar-commons-server/src/main/java/org/radarbase/util/RollingTimeAverage.kt b/radar-commons-server/src/main/java/org/radarbase/util/RollingTimeAverage.kt new file mode 100644 index 00000000..8dd5491a --- /dev/null +++ b/radar-commons-server/src/main/java/org/radarbase/util/RollingTimeAverage.kt @@ -0,0 +1,92 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.util + +import java.time.Duration +import java.time.Instant +import java.util.* +import kotlin.math.roundToInt + +/** + * Get the average of a set of values collected in a sliding time window of fixed duration. At least + * one value is needed to get an average. + * @param window duration of the time window. + */ +class RollingTimeAverage( + private val window: Duration, +) { + private var firstTime: TimeCount? = null + private val deque: Deque = LinkedList() + + /** Whether values have already been added. */ + val hasAverage: Boolean + get() = firstTime != null + + /** Add a new value. */ + fun add(x: Double) { + if (firstTime == null) { + firstTime = TimeCount(x) + } else { + deque.addLast(TimeCount(x)) + } + } + + /** Add a value of one. */ + fun increment() { + add(1.0) + } + + /** + * Get the average value per second over a sliding time window of fixed size. + * + * It takes one value before the window started as a baseline, and adds all values in the + * window. It then divides by the total time window from the first value (outside/before the + * window) to the last value (at the end of the window). 
+ * @return average value per second + */ + val average: Double + get() { + var localFirstTime = checkNotNull(firstTime) { "Cannot get average without values" } + val now = Instant.now() + val windowStart = now - window + while (!deque.isEmpty() && deque.first.time < windowStart) { + localFirstTime = deque.removeFirst() + } + val total = localFirstTime.value + deque.sumOf { it.value } + firstTime = localFirstTime + return if (deque.isEmpty() || localFirstTime.time >= windowStart) { + 1000.0 * total / Duration.between(localFirstTime.time, now).toMillis() + } else { + val time = Duration.between(windowStart, deque.last.time) + val removedRate = Duration.between(localFirstTime.time, windowStart).toMillis() / + Duration.between(localFirstTime.time, deque.first.time).toMillis().toDouble() + val removedValue = localFirstTime.value + deque.first.value * removedRate + 1000.0 * (total - removedValue) / time.toMillis() + } + } + + /** + * Rounded [.getAverage]. + */ + val count: Int + get() = average.roundToInt() + + private class TimeCount( + val value: Double, + ) { + val time: Instant = Instant.now() + } +} diff --git a/radar-commons-server/src/test/java/org/radarbase/config/ServerConfigTest.java b/radar-commons-server/src/test/java/org/radarbase/config/ServerConfigTest.java index 11cc14bc..c5b1ba00 100644 --- a/radar-commons-server/src/test/java/org/radarbase/config/ServerConfigTest.java +++ b/radar-commons-server/src/test/java/org/radarbase/config/ServerConfigTest.java @@ -16,16 +16,14 @@ package org.radarbase.config; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import java.io.IOException; -import java.net.MalformedURLException; import java.net.URL; -import okhttp3.HttpUrl; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Created by 
joris on 01/05/2017. @@ -61,26 +59,4 @@ public void jacksonUrl() throws IOException { + "path: /schema")) .getUrlString()); } - - @Test - public void getHttpUrl() throws MalformedURLException { - ServerConfig config = new ServerConfig("http://something.else/that"); - HttpUrl url = config.getHttpUrl(); - assertEquals("http://something.else/that/", url.toString()); - assertEquals("something.else", url.host()); - assertEquals("http", url.scheme()); - assertEquals(80, url.port()); - assertEquals("/that/", url.encodedPath()); - } - - @Test - public void getHttpUrlWitoutRoot() throws MalformedURLException { - ServerConfig config = new ServerConfig("http://something.else"); - HttpUrl url = config.getHttpUrl(); - assertEquals("http://something.else/", url.toString()); - assertEquals("something.else", url.host()); - assertEquals("http", url.scheme()); - assertEquals(80, url.port()); - assertEquals("/", url.encodedPath()); - } } diff --git a/radar-commons-server/src/test/java/org/radarbase/stream/collector/AggregateListCollectorTest.java b/radar-commons-server/src/test/java/org/radarbase/stream/collector/AggregateListCollectorTest.java index 3645a24d..282bc7fa 100644 --- a/radar-commons-server/src/test/java/org/radarbase/stream/collector/AggregateListCollectorTest.java +++ b/radar-commons-server/src/test/java/org/radarbase/stream/collector/AggregateListCollectorTest.java @@ -16,9 +16,9 @@ package org.radarbase.stream.collector; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.radarcns.passive.empatica.EmpaticaE4Acceleration; /** diff --git a/radar-commons-server/src/test/java/org/radarbase/stream/collector/NumericAggregateCollectorTest.java b/radar-commons-server/src/test/java/org/radarbase/stream/collector/NumericAggregateCollectorTest.java index 13e60a86..9dbfedad 100644 --- 
a/radar-commons-server/src/test/java/org/radarbase/stream/collector/NumericAggregateCollectorTest.java +++ b/radar-commons-server/src/test/java/org/radarbase/stream/collector/NumericAggregateCollectorTest.java @@ -16,14 +16,14 @@ package org.radarbase.stream.collector; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; -import java.io.IOException; import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.Arrays; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.radarcns.kafka.AggregateKey; import org.radarcns.monitor.application.ApplicationRecordCounts; import org.radarcns.passive.empatica.EmpaticaE4BloodVolumePulse; @@ -36,7 +36,7 @@ public class NumericAggregateCollectorTest { private NumericAggregateCollector valueCollector; - @Before + @BeforeEach public void setUp() { this.valueCollector = new NumericAggregateCollector("test", true); } @@ -106,21 +106,27 @@ public void testAverageFloat() { assertEquals(36.8508954, valueCollector.getMean(), 0); } - @Test(expected = IllegalStateException.class) + @Test public void testAddRecordWithoutSchema() { - valueCollector.add(new EmpaticaE4BloodVolumePulse(0d, 0d, 0f)); + assertThrows(IllegalStateException.class, () -> + valueCollector.add(new EmpaticaE4BloodVolumePulse(0d, 0d, 0f)) + ); } - @Test(expected = IllegalArgumentException.class) + @Test public void testWrongRecordType() { - this.valueCollector = new NumericAggregateCollector("isPlugged", - PhoneBatteryLevel.getClassSchema(), false); + assertThrows(IllegalArgumentException.class, () -> + this.valueCollector = new NumericAggregateCollector("isPlugged", + PhoneBatteryLevel.getClassSchema(), false) + ); } - @Test(expected = IllegalArgumentException.class) + @Test public void testWrongFieldName() { - this.valueCollector = new 
NumericAggregateCollector("doesNotExist", - PhoneBatteryLevel.getClassSchema(), false); + assertThrows(IllegalArgumentException.class, () -> + this.valueCollector = new NumericAggregateCollector("doesNotExist", + PhoneBatteryLevel.getClassSchema(), false) + ); } @Test diff --git a/radar-commons-server/src/test/java/org/radarbase/stream/collector/UniformSamplingReservoirTest.java b/radar-commons-server/src/test/java/org/radarbase/stream/collector/UniformSamplingReservoirTest.java index 359289b8..30ac161b 100644 --- a/radar-commons-server/src/test/java/org/radarbase/stream/collector/UniformSamplingReservoirTest.java +++ b/radar-commons-server/src/test/java/org/radarbase/stream/collector/UniformSamplingReservoirTest.java @@ -1,12 +1,12 @@ package org.radarbase.stream.collector; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Iterator; import java.util.List; import java.util.concurrent.ThreadLocalRandom; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class UniformSamplingReservoirTest { @Test @@ -66,4 +66,4 @@ private static > boolean isOrdered(List list) { } return true; } -} \ No newline at end of file +} diff --git a/radar-commons-testing/build.gradle b/radar-commons-testing/build.gradle deleted file mode 100644 index b4bc1b31..00000000 --- a/radar-commons-testing/build.gradle +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -apply plugin: 'application' - -mainClassName = 'org.radarbase.mock.MockProducer' - -configurations { - applicationRuntimeOnly -} - -run { - classpath += configurations.applicationRuntimeOnly - if (project.hasProperty("mockConfig")) { - args project.property("mockConfig") - } else { - args 'mock.yml' - } -} - -description = 'RADAR Common testing library mocking code and utilities.' - -dependencies { - api project(':radar-commons') - api project(':radar-commons-server') - api group: 'org.apache.avro', name: 'avro', version: avroVersion - api group: 'org.radarbase', name: 'radar-schemas-commons', version: radarSchemasVersion - - implementation group: 'com.opencsv', name: 'opencsv', version: opencsvVersion - implementation(platform("com.fasterxml.jackson:jackson-bom:$jacksonVersion")) - implementation group: 'com.fasterxml.jackson.core' , name: 'jackson-databind' - implementation group: 'org.apache.kafka', name: 'kafka-clients', version: kafkaVersion - implementation (group: 'io.confluent', name: 'kafka-avro-serializer', version: confluentVersion) { - exclude group: 'com.101tec' - exclude group: 'org.slf4j', module: 'slf4j-log4j12' - } - - applicationRuntimeOnly group: 'org.slf4j', name: 'slf4j-simple', version: slf4jVersion - - // Direct producer uses KafkaAvroSerializer if initialized - testImplementation group: 'junit', name: 'junit', version: junitVersion - testImplementation group: 'org.hamcrest', name: 'hamcrest-all', version: hamcrestVersion - testImplementation group: 'org.slf4j', name: 'slf4j-simple', version: slf4jVersion -} - -apply 
from: '../gradle/publishing.gradle' -apply from: '../gradle/codestyle.gradle' diff --git a/radar-commons-testing/build.gradle.kts b/radar-commons-testing/build.gradle.kts new file mode 100644 index 00000000..a1513bcc --- /dev/null +++ b/radar-commons-testing/build.gradle.kts @@ -0,0 +1,60 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + application +} + +val applicationRuntimeOnly: Configuration by configurations.creating + +application { + mainClass.set("org.radarbase.mock.MockProducer") +} + +tasks.named("run") { + classpath += applicationRuntimeOnly + if (project.hasProperty("mockConfig")) { + args(project.property("mockConfig")) + } else { + args("mock.yml") + } +} + +description = "RADAR Common testing library mocking code and utilities." 
+ +dependencies { + api(project(":radar-commons")) + api(project(":radar-commons-server")) + api(project(":radar-commons-kotlin")) + + api("org.apache.avro:avro:${Versions.avro}") + api("org.radarbase:radar-schemas-commons:${Versions.radarSchemas}") + + implementation("com.opencsv:opencsv:${Versions.opencsv}") + implementation(platform("com.fasterxml.jackson:jackson-bom:${Versions.jackson}")) + implementation("com.fasterxml.jackson.core:jackson-databind") + + implementation("org.apache.kafka:kafka-clients:${Versions.kafka}") + implementation("io.confluent:kafka-avro-serializer:${Versions.confluent}") + + implementation(platform("io.ktor:ktor-bom:${Versions.ktor}")) + implementation("io.ktor:ktor-serialization-kotlinx-json") + + applicationRuntimeOnly("org.slf4j:slf4j-simple:${Versions.slf4j}") + + testImplementation("org.hamcrest:hamcrest:${Versions.hamcrest}") + testImplementation("org.mockito:mockito-core:${Versions.mockito}") +} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/MockDevice.java b/radar-commons-testing/src/main/java/org/radarbase/mock/MockDevice.java deleted file mode 100644 index f2592fb2..00000000 --- a/radar-commons-testing/src/main/java/org/radarbase/mock/MockDevice.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.mock; - -import java.io.IOException; -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.avro.SchemaValidationException; -import org.apache.avro.specific.SpecificRecord; -import org.radarbase.data.Record; -import org.radarbase.mock.data.RecordGenerator; -import org.radarbase.producer.KafkaSender; -import org.radarbase.producer.KafkaTopicSender; -import org.radarbase.util.Oscilloscope; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Mock device that sends data for given topics at a given rate. This can be used to simulate - * any number of real devices. - * @param record key type - */ -public class MockDevice extends Thread { - private static final Logger logger = LoggerFactory.getLogger(MockDevice.class); - private final int baseFrequency; - private final KafkaSender sender; - private final AtomicBoolean stopping; - private final List> generators; - private final K key; - - private Exception exception; - - /** - * Basic constructor. 
- * @param sender sender to send data with - * @param key key to send all messages with - * @param generators data generators that produce the data we send - */ - public MockDevice(KafkaSender sender, K key, List> generators) { - this.generators = generators; - this.key = key; - baseFrequency = computeBaseFrequency(generators); - this.sender = sender; - this.stopping = new AtomicBoolean(false); - exception = null; - } - - @Override - @SuppressWarnings("PMD.CloseResource") - public void run() { - List> topicSenders = - new ArrayList<>(generators.size()); - List>> recordIterators = - new ArrayList<>(generators.size()); - - try { - for (RecordGenerator generator : generators) { - topicSenders.add(sender.sender(generator.getTopic())); - recordIterators.add(generator.iterateValues(key, 0)); - } - Oscilloscope oscilloscope = new Oscilloscope(baseFrequency); - - try { - while (!stopping.get()) { - // The time keeping is regulated with beats, with baseFrequency beats per - // second. - int beat = oscilloscope.beat(); - - for (int i = 0; i < generators.size(); i++) { - int frequency = generators.get(i).getConfig().getFrequency(); - if (frequency > 0 && beat % (baseFrequency / frequency) == 0) { - Record record = recordIterators.get(i).next(); - topicSenders.get(i).send(record.key, record.value); - } - } - } - } catch (InterruptedException ex) { - // do nothing, just exit the loop - } - - for (KafkaTopicSender topicSender : topicSenders) { - topicSender.close(); - } - } catch (SchemaValidationException | IOException e) { - synchronized (this) { - this.exception = e; - } - logger.error("MockDevice {} failed to send message", key, e); - } - } - - /** - * Shut down the device eventually. - */ - public void shutdown() { - stopping.set(true); - } - - /** Get the exception that occurred in the thread. Returns null if no exception occurred. 
*/ - public synchronized Exception getException() { - return exception; - } - - /** Check whether an exception occurred, and rethrow the exception if that is the case. */ - public synchronized void checkException() throws IOException, SchemaValidationException { - if (exception != null) { - if (exception instanceof IOException) { - throw (IOException) exception; - } else if (exception instanceof SchemaValidationException) { - throw (SchemaValidationException) exception; - } else if (exception instanceof RuntimeException) { - throw (RuntimeException) exception; - } else { - throw new IllegalStateException("Unknown exception occurred", exception); - } - } - } - - private int computeBaseFrequency(List> generators) { - BigInteger lcm = BigInteger.ONE; - for (RecordGenerator generator : generators) { - BigInteger freq = BigInteger.valueOf(generator.getConfig().getFrequency()); - lcm = lcm.multiply(freq.divide(lcm.gcd(freq))); // a * (b / gcd(a, b)); - } - return lcm.intValue(); - } -} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/MockDevice.kt b/radar-commons-testing/src/main/java/org/radarbase/mock/MockDevice.kt new file mode 100644 index 00000000..69d06595 --- /dev/null +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/MockDevice.kt @@ -0,0 +1,124 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.radarbase.mock + +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.isActive +import org.apache.avro.SchemaValidationException +import org.apache.avro.specific.SpecificRecord +import org.radarbase.mock.data.RecordGenerator +import org.radarbase.producer.KafkaSender +import org.radarbase.util.Oscilloscope +import org.slf4j.LoggerFactory +import java.io.IOException +import java.math.BigInteger +import java.util.concurrent.atomic.AtomicBoolean + +/** + * Mock device that sends data for given topics at a given rate. This can be used to simulate + * any number of real devices. + * @param record key type + */ +class MockDevice( + sender: KafkaSender, + private val key: K, + private val generators: List>, +) { + private val baseFrequency: Int + private val sender: KafkaSender + private val stopping: AtomicBoolean + + /** Get the exception that occurred in the thread. Returns null if no exception occurred. */ + @get:Synchronized + var exception: Exception? + private set + + /** + * Basic constructor. + * @param sender sender to send data with + * @param key key to send all messages with + * @param generators data generators that produce the data we send + */ + init { + baseFrequency = computeBaseFrequency(generators) + this.sender = sender + stopping = AtomicBoolean(false) + exception = null + } + + suspend fun CoroutineScope.run() { + try { + val topicSenders = generators.map { sender.sender(it.topic) } + val recordIterators = generators.map { it.iterateValues(key, 0) } + val oscilloscope = Oscilloscope(baseFrequency) + try { + while (isActive) { + // The time keeping is regulated with beats, with baseFrequency beats per + // second. 
+ val beat = oscilloscope.beat() + for (i in generators.indices) { + val frequency = generators[i].config.frequency + if (frequency > 0 && beat % (baseFrequency / frequency) == 0) { + val record = recordIterators[i].next() + topicSenders[i].send(record.key, record.value) + } + } + } + } catch (ex: InterruptedException) { + // do nothing, just exit the loop + } + } catch (e: SchemaValidationException) { + synchronized(this) { exception = e } + logger.error("MockDevice {} failed to send message", key, e) + } catch (e: IOException) { + synchronized(this) { exception = e } + logger.error("MockDevice {} failed to send message", key, e) + } + } + + /** + * Shut down the device eventually. + */ + fun shutdown() { + stopping.set(true) + } + + /** Check whether an exception occurred, and rethrow the exception if that is the case. */ + @Synchronized + @Throws(IOException::class, SchemaValidationException::class) + fun checkException() { + when (exception) { + null -> {} + is IOException -> throw exception as IOException + is SchemaValidationException -> throw exception as SchemaValidationException + is RuntimeException -> throw exception as RuntimeException + else -> throw IllegalStateException("Unknown exception occurred", exception) + } + } + + private fun computeBaseFrequency(generators: List>): Int { + var lcm = BigInteger.ONE + for (generator in generators) { + val freq = BigInteger.valueOf(generator.config.frequency.toLong()) + lcm = lcm.multiply(freq.divide(lcm.gcd(freq))) // a * (b / gcd(a, b)); + } + return lcm.toInt() + } + + companion object { + private val logger = LoggerFactory.getLogger(MockDevice::class.java) + } +} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/MockFileSender.java b/radar-commons-testing/src/main/java/org/radarbase/mock/MockFileSender.java deleted file mode 100644 index 668fd151..00000000 --- a/radar-commons-testing/src/main/java/org/radarbase/mock/MockFileSender.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2017 
The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.mock; - -import com.opencsv.exceptions.CsvValidationException; -import java.io.IOException; -import org.apache.avro.SchemaValidationException; -import org.radarbase.data.Record; -import org.radarbase.mock.data.MockCsvParser; -import org.radarbase.producer.KafkaSender; -import org.radarbase.producer.KafkaTopicSender; - -/** - * Send mock data from a CSV file. - * - *

The value type is dynamic, so we will not check any of the generics. - */ -public class MockFileSender { - private final KafkaSender sender; - private final MockCsvParser parser; - - public MockFileSender(KafkaSender sender, MockCsvParser parser) { - this.parser = parser; - this.sender = sender; - } - - /** - * Send data from the configured CSV file synchronously. - * @throws IOException if data could not be read or sent. - */ - @SuppressWarnings("unchecked") - public void send() throws IOException { - try (KafkaTopicSender topicSender = sender.sender(parser.getTopic())) { - while (parser.hasNext()) { - Record record = parser.next(); - topicSender.send(record.key, record.value); - } - } catch (SchemaValidationException e) { - throw new IOException("Failed to match schemas", e); - } catch (CsvValidationException e) { - throw new IOException("Failed to read CSV file", e); - } - } - - @Override - public String toString() { - return "MockFileSender{" - + "parser=" + parser - + '}'; - } -} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/MockFileSender.kt b/radar-commons-testing/src/main/java/org/radarbase/mock/MockFileSender.kt new file mode 100644 index 00000000..47ba5255 --- /dev/null +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/MockFileSender.kt @@ -0,0 +1,63 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.radarbase.mock + +import com.opencsv.exceptions.CsvValidationException +import org.apache.avro.SchemaValidationException +import org.apache.avro.generic.GenericRecord +import org.radarbase.data.Record +import org.radarbase.mock.data.MockCsvParser +import org.radarbase.producer.KafkaSender +import java.io.IOException + +/** + * Send mock data from a CSV file. + * + * + * The value type is dynamic, so we will not check any of the generics. + */ +class MockFileSender( + private val sender: KafkaSender, + private val parser: MockCsvParser, +) { + /** + * Send data from the configured CSV file synchronously. + * @throws IOException if data could not be read or sent. + */ + @Throws(IOException::class) + suspend fun send() { + parser.initialize() + try { + val topicSender = sender.sender(parser.topic) + while (parser.hasNext()) { + val record: Record<*, *> = parser.next() + topicSender.send(record.key as GenericRecord, record.value as GenericRecord) + } + } catch (e: SchemaValidationException) { + throw IOException("Failed to match schemas", e) + } catch (e: CsvValidationException) { + throw IOException("Failed to read CSV file", e) + } + } + + override fun toString(): String { + return ( + "MockFileSender{" + + "parser=" + parser + + '}' + ) + } +} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/MockProducer.java b/radar-commons-testing/src/main/java/org/radarbase/mock/MockProducer.java deleted file mode 100644 index 0452c861..00000000 --- a/radar-commons-testing/src/main/java/org/radarbase/mock/MockProducer.java +++ /dev/null @@ -1,430 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.mock; - -import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.BOOTSTRAP_SERVERS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; - -import com.opencsv.exceptions.CsvValidationException; -import io.confluent.kafka.serializers.KafkaAvroSerializer; -import java.io.IOException; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Properties; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import okhttp3.Credentials; -import okhttp3.FormBody; -import okhttp3.Headers; -import okhttp3.OkHttpClient; -import okhttp3.Request; -import okhttp3.Request.Builder; -import okhttp3.Response; -import okhttp3.ResponseBody; -import org.apache.avro.SchemaValidationException; -import org.json.JSONObject; -import org.radarbase.config.ServerConfig; -import org.radarbase.config.YamlConfigLoader; -import org.radarbase.mock.config.AuthConfig; -import org.radarbase.mock.config.MockDataConfig; -import org.radarbase.mock.data.MockCsvParser; -import org.radarbase.mock.data.RecordGenerator; -import org.radarbase.producer.BatchedKafkaSender; -import org.radarbase.producer.KafkaSender; -import 
org.radarbase.producer.direct.DirectSender; -import org.radarbase.producer.rest.ConnectionState; -import org.radarbase.producer.rest.RestClient; -import org.radarbase.producer.rest.RestSender; -import org.radarbase.producer.rest.SchemaRetriever; -import org.radarcns.kafka.ObservationKey; -import org.radarbase.mock.config.BasicMockConfig; -import org.radarcns.passive.empatica.EmpaticaE4Acceleration; -import org.radarcns.passive.empatica.EmpaticaE4BatteryLevel; -import org.radarcns.passive.empatica.EmpaticaE4BloodVolumePulse; -import org.radarcns.passive.empatica.EmpaticaE4ElectroDermalActivity; -import org.radarcns.passive.empatica.EmpaticaE4InterBeatInterval; -import org.radarcns.passive.empatica.EmpaticaE4Temperature; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A Mock Producer class that can be used to stream data. It can use MockFileSender and MockDevice - * for testing purposes, with direct or indirect streaming. - */ -@SuppressWarnings("PMD") -public class MockProducer { - - private static final Logger logger = LoggerFactory.getLogger(MockProducer.class); - - private final List> devices; - private final List files; - private final List senders; - private final SchemaRetriever retriever; - - /** - * MockProducer with files from current directory. The data root directory will be the current - * directory. - * @param mockConfig configuration to mock - * @throws IOException if the data could not be read or sent - */ - public MockProducer(BasicMockConfig mockConfig) throws IOException { - this(mockConfig, null); - } - - /** - * Basic constructor. 
- * @param mockConfig configuration to mock - * @param root root directory of where mock files are located - * @throws IOException if data could not be sent - */ - public MockProducer(BasicMockConfig mockConfig, Path root) throws IOException { - int numDevices = mockConfig.getNumberOfDevices(); - - retriever = new SchemaRetriever(mockConfig.getSchemaRegistry(), 10); - List tmpSenders = null; - - try { - devices = new ArrayList<>(numDevices); - files = new ArrayList<>(numDevices); - - List dataConfigs = mockConfig.getData(); - if (dataConfigs == null) { - dataConfigs = defaultDataConfig(); - } - - List> generators; - List mockFiles; - generators = createGenerators(dataConfigs); - mockFiles = createMockFiles(dataConfigs, root); - - tmpSenders = createSenders(mockConfig, numDevices + mockFiles.size(), - mockConfig.getAuthConfig()); - - if (!generators.isEmpty()) { - String userId = "UserID_"; - String sourceId = "SourceID_"; - - for (int i = 0; i < numDevices; i++) { - ObservationKey key = new ObservationKey("test", userId + i, sourceId + i); - devices.add(new MockDevice<>(tmpSenders.get(i), key, generators)); - } - } - - for (int i = 0; i < mockFiles.size(); i++) { - files.add(new MockFileSender(tmpSenders.get(i + numDevices), mockFiles.get(i))); - } - } catch (CsvValidationException ex) { - if (tmpSenders != null) { - for (KafkaSender sender : tmpSenders) { - sender.close(); - } - } - throw new IOException("Cannot read CSV file", ex); - } catch (Exception ex) { - if (tmpSenders != null) { - for (KafkaSender sender : tmpSenders) { - sender.close(); - } - } - throw ex; - } - - senders = tmpSenders; - } - - private List createSenders( - BasicMockConfig mockConfig, int numDevices, AuthConfig authConfig) throws IOException { - - if (mockConfig.isDirectProducer()) { - return createDirectSenders(numDevices, mockConfig.getSchemaRegistry().getUrlString(), - mockConfig.getBrokerPaths()); - } else { - return createRestSenders(numDevices, retriever, mockConfig.getRestProxy(), - 
mockConfig.hasCompression(), authConfig); - } - } - - /** Create senders that directly produce data to Kafka. */ - private List createDirectSenders(int numDevices, - String retrieverUrl, String brokerPaths) { - List result = new ArrayList<>(numDevices); - for (int i = 0; i < numDevices; i++) { - Properties properties = new Properties(); - properties.put(KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class); - properties.put(VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class); - properties.put(SCHEMA_REGISTRY_URL_CONFIG, retrieverUrl); - properties.put(BOOTSTRAP_SERVERS_CONFIG, brokerPaths); - - result.add(new DirectSender(properties)); - } - return result; - } - - private String requestAccessToken(OkHttpClient okHttpClient, AuthConfig authConfig) - throws IOException { - Request request = new Builder() - .url(authConfig.getTokenUrl()) - .post(new FormBody.Builder() - .add("grant_type", "client_credentials") - .add("client_id", authConfig.getClientId()) - .add("client_secret", authConfig.getClientSecret()) - .build()) - .addHeader("Authorization", Credentials - .basic(authConfig.getClientId(), authConfig.getClientSecret())) - .build(); - - try (Response response = okHttpClient.newCall(request).execute()) { - ResponseBody responseBody = response.body(); - if (responseBody == null) { - throw new IOException("Cannot request token at " + request.url() - + " (" + response.code() + ") returned no body"); - } - if (!response.isSuccessful()) { - throw new IOException("Cannot request token: at " + request.url() - + " (" + response.code() + "): " + responseBody.string()); - } - return new JSONObject(responseBody.string()).getString("access_token"); - } - } - - /** Create senders that produce data to Kafka via the REST proxy. 
*/ - private List createRestSenders(int numDevices, - SchemaRetriever retriever, ServerConfig restProxy, boolean useCompression, - AuthConfig authConfig) throws IOException { - List result = new ArrayList<>(numDevices); - ConnectionState sharedState = new ConnectionState(10, TimeUnit.SECONDS); - - Headers headers; - if (authConfig == null) { - headers = Headers.of(); - } else { - OkHttpClient okHttpClient = new OkHttpClient(); - String token = requestAccessToken(okHttpClient, authConfig); - headers = Headers.of("Authorization", "Bearer " + token); - } - - for (int i = 0; i < numDevices; i++) { - RestClient httpClient = RestClient.newClient() - .server(restProxy) - .gzipCompression(useCompression) - .timeout(10, TimeUnit.SECONDS) - .build(); - - RestSender restSender = new RestSender.Builder() - .schemaRetriever(retriever) - .httpClient(httpClient) - .connectionState(sharedState) - .headers(headers) - .build(); - result.add(new BatchedKafkaSender(restSender, 1000, 1000)); - } - return result; - } - - /** Start sending data. */ - public void start() throws IOException { - for (MockDevice device : devices) { - device.start(); - } - for (MockFileSender file : files) { - file.send(); - logger.info("Sent data {}", file); - } - } - - /** Stop sending data and clean up all resources. */ - public void shutdown() throws IOException, InterruptedException, SchemaValidationException { - if (!devices.isEmpty()) { - logger.info("Shutting down mock devices"); - for (MockDevice device : devices) { - device.shutdown(); - } - logger.info("Waiting for mock devices to finish..."); - for (MockDevice device : devices) { - device.join(5_000L); - } - } - logger.info("Closing channels"); - for (KafkaSender sender : senders) { - sender.close(); - } - - for (MockDevice device : devices) { - device.checkException(); - } - } - - /** - * Runs the MockProducer with given YAML mock config file. 
- */ - public static void main(String[] args) { - if (args.length != 1) { - logger.error("This command needs a mock file argument"); - System.exit(1); - } - - Path mockFile = Paths.get(args[0]).toAbsolutePath(); - BasicMockConfig config = null; - try { - config = new YamlConfigLoader().load(mockFile, BasicMockConfig.class); - } catch (IOException ex) { - logger.error("Failed to load given mock file {}: {}", mockFile, ex.getMessage()); - System.exit(1); - } - - try { - MockProducer producer = new MockProducer(config, mockFile.getParent()); - producer.start(); - if (!producer.devices.isEmpty()) { - waitForProducer(producer, config.getDuration()); - } - } catch (IllegalArgumentException ex) { - logger.error("{}", ex.getMessage()); - System.exit(1); - } catch (InterruptedException e) { - // during shutdown, not that important. Will shutdown again. - } catch (Exception ex) { - logger.error("Failed to start mock producer", ex); - System.exit(1); - } - } - - /** Wait for given duration and then stop the producer. 
*/ - private static void waitForProducer(final MockProducer producer, long duration) - throws IOException, InterruptedException, SchemaValidationException { - final AtomicBoolean isShutdown = new AtomicBoolean(false); - - Runtime.getRuntime().addShutdownHook(new Thread(() -> { - try { - if (!isShutdown.get()) { - producer.shutdown(); - } - } catch (InterruptedException ex) { - logger.warn("Shutdown interrupted", ex); - } catch (Exception ex) { - logger.warn("Failed to shutdown producer", ex); - } - })); - - if (duration <= 0L) { - try { - logger.info("Producing data until interrupted"); - Thread.sleep(Long.MAX_VALUE); - } catch (InterruptedException ex) { - // this is intended - } - } else { - try { - logger.info("Producing data for {} seconds", duration / 1000d); - Thread.sleep(duration); - } catch (InterruptedException ex) { - logger.warn("Data producing interrupted"); - } - producer.shutdown(); - isShutdown.set(true); - logger.info("Producing data done."); - } - } - - private List defaultDataConfig() { - MockDataConfig acceleration = new MockDataConfig(); - acceleration.setTopic("android_empatica_e4_acceleration"); - acceleration.setFrequency(32); - acceleration.setValueSchema(EmpaticaE4Acceleration.class.getName()); - acceleration.setInterval(-2d, 2d); - acceleration.setValueFields(Arrays.asList("x", "y", "z")); - - MockDataConfig battery = new MockDataConfig(); - battery.setTopic("android_empatica_e4_battery_level"); - battery.setValueSchema(EmpaticaE4BatteryLevel.class.getName()); - battery.setFrequency(1); - battery.setInterval(0d, 1d); - battery.setValueField("batteryLevel"); - - MockDataConfig bvp = new MockDataConfig(); - bvp.setTopic("android_empatica_e4_blood_volume_pulse"); - bvp.setValueSchema(EmpaticaE4BloodVolumePulse.class.getName()); - bvp.setFrequency(64); - bvp.setInterval(60d, 90d); - bvp.setValueField("bloodVolumePulse"); - - MockDataConfig eda = new MockDataConfig(); - eda.setTopic("android_empatica_e4_electrodermal_activity"); - 
eda.setValueSchema(EmpaticaE4ElectroDermalActivity.class.getName()); - eda.setValueField("electroDermalActivity"); - eda.setFrequency(4); - eda.setInterval(0.01d, 0.05d); - - MockDataConfig ibi = new MockDataConfig(); - ibi.setTopic("android_empatica_e4_inter_beat_interval"); - ibi.setValueSchema(EmpaticaE4InterBeatInterval.class.getName()); - ibi.setValueField("interBeatInterval"); - ibi.setFrequency(1); - ibi.setInterval(40d, 150d); - - MockDataConfig temperature = new MockDataConfig(); - temperature.setTopic("android_empatica_e4_temperature"); - temperature.setValueSchema(EmpaticaE4Temperature.class.getName()); - temperature.setFrequency(4); - temperature.setInterval(20d, 60d); - temperature.setValueField("temperature"); - - return Arrays.asList(acceleration, battery, bvp, eda, ibi, temperature); - } - - private List> createGenerators(List configs) { - - List> result = new ArrayList<>(configs.size()); - - for (MockDataConfig config : configs) { - if (config.getDataFile() == null) { - result.add(new RecordGenerator<>(config, ObservationKey.class)); - } - } - - return result; - } - - private List createMockFiles(List configs, - Path dataRoot) throws IOException, CsvValidationException { - - List result = new ArrayList<>(configs.size()); - - Instant now = Instant.now(); - Path parent = dataRoot; - if (parent == null) { - parent = Paths.get(".").toAbsolutePath(); - } - - for (MockDataConfig config : configs) { - if (config.getDataFile() != null) { - logger.info("Reading mock data from {}", config.getDataFile()); - result.add(new MockCsvParser(config, parent, now, retriever)); - } else { - logger.info("Generating mock data from {}", config); - } - } - - return result; - } -} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/MockProducer.kt b/radar-commons-testing/src/main/java/org/radarbase/mock/MockProducer.kt new file mode 100644 index 00000000..a6fbae13 --- /dev/null +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/MockProducer.kt @@ 
-0,0 +1,357 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.mock + +import com.opencsv.exceptions.CsvValidationException +import io.ktor.client.call.* +import io.ktor.client.plugins.* +import io.ktor.client.plugins.auth.* +import io.ktor.client.plugins.auth.providers.* +import io.ktor.client.request.forms.* +import io.ktor.http.* +import kotlinx.coroutines.* +import org.apache.avro.SchemaValidationException +import org.radarbase.config.ServerConfig +import org.radarbase.config.YamlConfigLoader +import org.radarbase.ktor.auth.ClientCredentialsConfig +import org.radarbase.ktor.auth.clientCredentials +import org.radarbase.mock.config.AuthConfig +import org.radarbase.mock.config.BasicMockConfig +import org.radarbase.mock.config.MockDataConfig +import org.radarbase.mock.data.MockCsvParser +import org.radarbase.mock.data.RecordGenerator +import org.radarbase.producer.KafkaSender +import org.radarbase.producer.io.GzipContentEncoding +import org.radarbase.producer.io.timeout +import org.radarbase.producer.rest.ConnectionState +import org.radarbase.producer.rest.RestKafkaSender.Companion.restKafkaSender +import org.radarbase.producer.schema.SchemaRetriever +import org.radarbase.producer.schema.SchemaRetriever.Companion.schemaRetriever +import org.radarcns.kafka.ObservationKey +import org.radarcns.passive.empatica.* +import org.slf4j.LoggerFactory +import java.io.IOException 
+import java.nio.file.Path +import java.nio.file.Paths +import java.time.Instant +import java.util.* +import java.util.concurrent.atomic.AtomicBoolean +import kotlin.system.exitProcess +import kotlin.time.Duration.Companion.seconds + +/** + * A Mock Producer class that can be used to stream data. It can use MockFileSender and MockDevice + * for testing purposes, with direct or indirect streaming. + * @param mockConfig configuration to mock + * @param root root directory of where mock files are located + * @throws IOException if the data could not be read or sent + */ +class MockProducer @JvmOverloads constructor( + mockConfig: BasicMockConfig, + root: Path? = null, +) { + private var devices: MutableList> + private var files: MutableList + private val senders: List + private val retriever: SchemaRetriever + private val job: Job = SupervisorJob() + + init { + val numDevices = mockConfig.numberOfDevices + retriever = schemaRetriever(mockConfig.schemaRegistry.urlString) { + httpClient { + timeout(10.seconds) + } + } + val dataConfigs = mockConfig.data + ?: defaultDataConfig() + try { + val generators: List> = createGenerators(dataConfigs) + val mockFiles: List = createMockFiles(dataConfigs, root) + senders = createSenders( + mockConfig, + numDevices + mockFiles.size, + mockConfig.authConfig, + ) + + devices = ArrayList(numDevices) + files = ArrayList(numDevices) + + if (generators.isNotEmpty()) { + val userId = "UserID_" + val sourceId = "SourceID_" + for (i in 0 until numDevices) { + val key = ObservationKey("test", userId + i, sourceId + i) + devices.add(MockDevice(senders[i], key, generators)) + } + } + for (i in mockFiles.indices) { + files.add(MockFileSender(senders[i + numDevices], mockFiles[i])) + } + } catch (ex: CsvValidationException) { + throw IOException("Cannot read CSV file", ex) + } catch (ex: Exception) { + throw ex + } + } + + @Throws(IOException::class) + private fun createSenders( + mockConfig: BasicMockConfig, + numDevices: Int, + authConfig: 
AuthConfig, + ): List = createRestSenders( + numDevices, + retriever, + mockConfig.restProxy, + mockConfig.hasCompression(), + authConfig, + ) + + /** Create senders that produce data to Kafka via the REST proxy. */ + @Throws(IOException::class) + private fun createRestSenders( + numDevices: Int, + retriever: SchemaRetriever, + restProxy: ServerConfig, + useCompression: Boolean, + authConfig: AuthConfig?, + ): List { + val scope = CoroutineScope(job) + val sharedState = ConnectionState(10.seconds, scope) + return (0 until numDevices) + .map { + restKafkaSender { + this.scope = scope + schemaRetriever = retriever + connectionState = sharedState + + httpClient { + defaultRequest { + url(restProxy.urlString) + } + if (authConfig != null) { + install(Auth) { + clientCredentials( + ClientCredentialsConfig( + authConfig.tokenUrl, + authConfig.clientId, + authConfig.clientSecret, + ).copyWithEnv(), + restProxy.host, + ) + } + } + if (useCompression) { + install(GzipContentEncoding) + } + timeout(10.seconds) + } + } + } + } + + /** Start sending data. */ + @Throws(IOException::class) + suspend fun start() { + withContext(Dispatchers.Default + job) { + for (device in devices) { + launch { + with(device) { + run() + } + } + } + for (file in files) { + launch { + file.send() + logger.info("Sent data {}", file) + } + } + } + } + + /** Stop sending data and clean up all resources. 
*/ + @Throws(IOException::class, InterruptedException::class, SchemaValidationException::class) + suspend fun shutdown() { + logger.info("Shutting down mock devices") + job.run { + cancel() + join() + } + for (device in devices) { + device.checkException() + } + } + + private fun defaultDataConfig(): List { + val acceleration = MockDataConfig().apply { + topic = "android_empatica_e4_acceleration" + frequency = 32 + valueSchema = EmpaticaE4Acceleration::class.java.name + setInterval(-2.0, 2.0) + valueFields = listOf("x", "y", "z") + } + val battery = MockDataConfig().apply { + topic = "android_empatica_e4_battery_level" + valueSchema = EmpaticaE4BatteryLevel::class.java.name + frequency = 1 + setInterval(0.0, 1.0) + setValueField("batteryLevel") + } + val bvp = MockDataConfig().apply { + topic = "android_empatica_e4_blood_volume_pulse" + valueSchema = EmpaticaE4BloodVolumePulse::class.java.name + frequency = 64 + setInterval(60.0, 90.0) + setValueField("bloodVolumePulse") + } + val eda = MockDataConfig().apply { + topic = "android_empatica_e4_electrodermal_activity" + valueSchema = EmpaticaE4ElectroDermalActivity::class.java.name + setValueField("electroDermalActivity") + frequency = 4 + setInterval(0.01, 0.05) + } + val ibi = MockDataConfig().apply { + topic = "android_empatica_e4_inter_beat_interval" + valueSchema = EmpaticaE4InterBeatInterval::class.java.name + setValueField("interBeatInterval") + frequency = 1 + setInterval(40.0, 150.0) + } + val temperature = MockDataConfig().apply { + topic = "android_empatica_e4_temperature" + valueSchema = EmpaticaE4Temperature::class.java.name + frequency = 4 + setInterval(20.0, 60.0) + setValueField("temperature") + } + return listOf(acceleration, battery, bvp, eda, ibi, temperature) + } + + private fun createGenerators( + configs: List, + ): List> = configs.mapNotNull { config -> + if (config.dataFile == null) { + RecordGenerator(config, ObservationKey::class.java) + } else { + null + } + } + + @Throws(IOException::class, 
CsvValidationException::class) + private fun createMockFiles( + configs: List, + dataRoot: Path?, + ): List { + val now = Instant.now() + var parent = dataRoot + if (parent == null) { + parent = Paths.get(".").toAbsolutePath() + } + return configs.mapNotNull { config -> + if (config.dataFile != null) { + logger.info("Reading mock data from {}", config.dataFile) + MockCsvParser(config, parent, now, retriever) + } else { + logger.info("Generating mock data from {}", config) + null + } + } + } + + companion object { + private val logger = LoggerFactory.getLogger(MockProducer::class.java) + + /** + * Runs the MockProducer with given YAML mock config file. + */ + @JvmStatic + fun main(args: Array) { + if (args.size != 1) { + logger.error("This command needs a mock file argument") + exitProcess(1) + } + val mockFile = Paths.get(args[0]).toAbsolutePath() + val config: BasicMockConfig + try { + config = YamlConfigLoader().load(mockFile, BasicMockConfig::class.java) + } catch (ex: IOException) { + logger.error("Failed to load given mock file {}: {}", mockFile, ex.message) + exitProcess(1) + } + try { + val producer = MockProducer(config, mockFile.parent) + runBlocking { + producer.start() + } + if (producer.devices.isNotEmpty()) { + waitForProducer(producer, config.duration) + } + } catch (ex: IllegalArgumentException) { + logger.error("{}", ex.message) + exitProcess(1) + } catch (e: InterruptedException) { + // during shutdown, not that important. Will shutdown again. + } catch (ex: Exception) { + logger.error("Failed to start mock producer", ex) + exitProcess(1) + } + } + + /** Wait for given duration and then stop the producer. 
*/ + @Throws(IOException::class, InterruptedException::class, SchemaValidationException::class) + private fun waitForProducer(producer: MockProducer, duration: Long) { + val isShutdown = AtomicBoolean(false) + Runtime.getRuntime().addShutdownHook( + Thread { + try { + if (!isShutdown.get()) { + runBlocking { + producer.shutdown() + } + } + } catch (ex: InterruptedException) { + logger.warn("Shutdown interrupted", ex) + } catch (ex: Exception) { + logger.warn("Failed to shutdown producer", ex) + } + }, + ) + if (duration <= 0L) { + try { + logger.info("Producing data until interrupted") + Thread.sleep(Long.MAX_VALUE) + } catch (ex: InterruptedException) { + // this is intended + } + } else { + try { + logger.info("Producing data for {} seconds", duration / 1000.0) + Thread.sleep(duration) + } catch (ex: InterruptedException) { + logger.warn("Data producing interrupted") + } + runBlocking { + producer.shutdown() + } + isShutdown.set(true) + logger.info("Producing data done.") + } + } + } +} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/config/BasicMockConfig.java b/radar-commons-testing/src/main/java/org/radarbase/mock/config/BasicMockConfig.java index 7adebb64..ad0ddf5a 100644 --- a/radar-commons-testing/src/main/java/org/radarbase/mock/config/BasicMockConfig.java +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/config/BasicMockConfig.java @@ -97,10 +97,6 @@ public void setRestProxy(ServerConfig restProxy) { this.restProxy = restProxy; } - public boolean isDirectProducer() { - return this.producerMode.trim().equalsIgnoreCase("direct"); - } - public boolean isUnsafeProducer() { return this.producerMode.trim().equalsIgnoreCase("unsafe"); } diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/data/HeaderHierarchy.java b/radar-commons-testing/src/main/java/org/radarbase/mock/data/HeaderHierarchy.java deleted file mode 100644 index 976d0950..00000000 --- 
a/radar-commons-testing/src/main/java/org/radarbase/mock/data/HeaderHierarchy.java +++ /dev/null @@ -1,91 +0,0 @@ -package org.radarbase.mock.data; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -public class HeaderHierarchy { - private final int index; - private final Map children; - private final HeaderHierarchy parent; - private final String name; - - /** Root node. */ - public HeaderHierarchy() { - this(null, -1, null); - } - - /** - * Header hierarchy child node. Usually accessed via {@link #add(int, List)} - * - * @param name name of the node - * @param index index in the csv file. -1 if not a leaf node. - * @param parent parent node. - */ - public HeaderHierarchy(String name, int index, HeaderHierarchy parent) { - this.name = name; - this.index = index; - this.children = new HashMap<>(); - this.parent = parent; - } - - /** - * Add child nodes to this node. Each item in the list will become a new level down, and the - * last item will become a leaf node with given index. - * - * @param i index if the item. - * @param item list of item elements, each one level deeper than the previous. - */ - public void add(int i, List item) { - Objects.requireNonNull(item); - if (item.isEmpty()) { - return; - } - HeaderHierarchy child = this.children.computeIfAbsent(item.get(0), - k -> new HeaderHierarchy(k, item.size() == 1 ? i : -1, this)); - child.add(i, item.subList(1, item.size())); - } - - /** - * Get the index of current element. - * - * @return index - * @throws IllegalStateException if current node is not a leaf. 
- */ - public int getIndex() { - if (index == -1) { - throw new IllegalStateException("Header does not exist"); - } - return index; - } - - public Map getChildren() { - return children; - } - - private void appendTo(StringBuilder builder) { - if (parent != null) { - parent.appendTo(builder); - } - if (name != null) { - builder.append('.').append(name); - } - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(50); - appendTo(builder); - if (index >= 0) { - builder.append('[') - .append(index) - .append(']'); - } - return builder.toString(); - } - - public String getName() { - return name; - } -} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/data/HeaderHierarchy.kt b/radar-commons-testing/src/main/java/org/radarbase/mock/data/HeaderHierarchy.kt new file mode 100644 index 00000000..f5815af4 --- /dev/null +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/data/HeaderHierarchy.kt @@ -0,0 +1,64 @@ +package org.radarbase.mock.data + +import java.util.* + +/** + * Header hierarchy child node. Usually accessed via [.add] + * + * @param name name of the node + * @param index index in the csv file. -1 if not a leaf node. + * @param parent parent node. + */ +class HeaderHierarchy( + val name: String? = null, + index: Int = -1, + private val parent: HeaderHierarchy? = null, +) { + /** The index of current element. */ + val index: Int = index + get() { + check(field != -1) { "Header does not exist" } + return field + } + + private val _children: MutableMap = HashMap() + val children: Map + get() = _children.toMap() + + /** + * Add child nodes to this node. Each item in the list will become a new level down, and the + * last item will become a leaf node with given index. + * + * @param index index of the item. + * @param item list of item elements, each one level deeper than the previous. 
+ */ + fun add(index: Int, item: List) { + Objects.requireNonNull(item) + if (item.isEmpty()) { + return + } + val child = _children.computeIfAbsent(item[0]) { k -> + HeaderHierarchy(k, if (item.size == 1) index else -1, this) + } + child.add(index, item.subList(1, item.size)) + } + + private fun StringBuilder.appendHeader() { + parent?.run { + appendHeader() + } + if (name != null) { + append('.') + append(name) + } + } + + override fun toString(): String = buildString(50) { + appendHeader() + if (index >= 0) { + append('[') + append(index) + append(']') + } + } +} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockCsvParser.java b/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockCsvParser.java deleted file mode 100644 index 005c5668..00000000 --- a/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockCsvParser.java +++ /dev/null @@ -1,287 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.mock.data; - -import com.opencsv.CSVReader; -import com.opencsv.exceptions.CsvValidationException; -import java.io.BufferedReader; -import java.io.Closeable; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.time.Duration; -import java.time.Instant; -import java.util.Base64; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Field; -import org.apache.avro.generic.GenericData; -import org.apache.avro.generic.GenericRecord; -import org.apache.avro.generic.GenericRecordBuilder; -import org.apache.avro.specific.SpecificRecord; -import org.radarbase.data.Record; -import org.radarbase.mock.config.MockDataConfig; -import org.radarbase.producer.rest.SchemaRetriever; -import org.radarbase.topic.AvroTopic; - -/** - * Parse mock data from a CSV file. - */ -@SuppressWarnings("PMD.GodClass") -public class MockCsvParser implements Closeable { - private final AvroTopic topic; - private final CSVReader csvReader; - private final BufferedReader bufferedReader; - private final Instant startTime; - private final Duration rowDuration; - private final HeaderHierarchy headers; - private String[] currentLine; - private int row; - private long rowTime; - - /** - * Base constructor. - * @param config configuration of the stream. - * @param root parent directory of the data file. - * @param retriever schema retriever to fetch schema with if none is supplied. 
- * @throws IllegalArgumentException if the second row has the wrong number of columns - */ - public MockCsvParser(MockDataConfig config, Path root, Instant startTime, - SchemaRetriever retriever) - throws IOException, CsvValidationException { - Schema keySchema; - Schema valueSchema; - try { - AvroTopic specificTopic = config.parseAvroTopic(); - keySchema = specificTopic.getKeySchema(); - valueSchema = specificTopic.getValueSchema(); - } catch (IllegalStateException ex) { - Objects.requireNonNull(retriever, "Cannot instantiate value schema without " - + "schema retriever."); - keySchema = AvroTopic.parseSpecificRecord(config.getKeySchema()).getSchema(); - valueSchema = retriever.getBySubjectAndVersion( - config.getTopic(), true, 0).getSchema(); - } - topic = new AvroTopic<>(config.getTopic(), - keySchema, valueSchema, - GenericRecord.class, GenericRecord.class); - - this.startTime = startTime; - row = 0; - rowDuration = Duration.ofMillis((long)(1.0 / config.getFrequency())); - rowTime = this.startTime.toEpochMilli(); - - Path dataFile = config.getDataFile(root); - bufferedReader = Files.newBufferedReader(dataFile); - csvReader = new CSVReader(bufferedReader); - headers = new HeaderHierarchy(); - String[] header = csvReader.readNext(); - for (int i = 0; i < header.length; i++) { - headers.add(i, List.of(header[i].split("\\."))); - } - currentLine = csvReader.readNext(); - } - - public AvroTopic getTopic() { - return topic; - } - - /** - * Read the next record in the file. 
- * @throws NullPointerException if a field from the Avro schema is missing as a column - * @throws IllegalArgumentException if the row has the wrong number of columns - * @throws IllegalStateException if a next row is not available - * @throws IOException if the next row could not be read - */ - public Record next() throws IOException, CsvValidationException { - if (!hasNext()) { - throw new IllegalStateException("No next record available"); - } - - GenericRecord key = parseRecord(currentLine, topic.getKeySchema(), - headers.getChildren().get("key")); - GenericRecord value = parseRecord(currentLine, topic.getValueSchema(), - headers.getChildren().get("value")); - incrementRow(); - return new Record<>(key, value); - } - - private void incrementRow() throws CsvValidationException, IOException { - currentLine = csvReader.readNext(); - row++; - rowTime = startTime - .plus(rowDuration.multipliedBy(row)) - .toEpochMilli(); - } - - /** - * Whether there is a next record in the file. - */ - public boolean hasNext() { - return currentLine != null; - } - - private GenericRecord parseRecord(String[] rawValues, Schema schema, HeaderHierarchy headers) { - GenericRecordBuilder record = new GenericRecordBuilder(schema); - Map children = headers.getChildren(); - - for (Field field : schema.getFields()) { - HeaderHierarchy child = children.get(field.name()); - if (child != null) { - record.set(field, parseValue(rawValues, field.schema(), child)); - } - } - - return record.build(); - } - - /** Parse value from Schema. 
*/ - public Object parseValue(String[] rawValues, Schema schema, HeaderHierarchy headers) { - switch (schema.getType()) { - case NULL: - case INT: - case LONG: - case FLOAT: - case DOUBLE: - case BOOLEAN: - case STRING: - case ENUM: - case BYTES: - return parseScalar(rawValues, schema, headers); - case UNION: - return parseUnion(rawValues, schema, headers); - case RECORD: - return parseRecord(rawValues, schema, headers); - case ARRAY: - return parseArray(rawValues, schema, headers); - case MAP: - return parseMap(rawValues, schema, headers); - default: - throw new IllegalArgumentException("Cannot handle schemas of type " - + schema.getType() + " in " + headers); - } - } - - private Object parseScalar(String[] rawValues, Schema schema, HeaderHierarchy headers) { - int fieldHeader = headers.getIndex(); - if (fieldHeader >= rawValues.length) { - throw new IllegalArgumentException("Row is missing value for " + headers.getName()); - } - String fieldString = rawValues[fieldHeader] - .replace("${timeSeconds}", Double.toString(rowTime / 1000.0)) - .replace("${timeMillis}", Long.toString(rowTime)); - - return parseScalar(fieldString, schema, headers); - } - - private static Object parseScalar(String fieldString, Schema schema, HeaderHierarchy headers) { - switch (schema.getType()) { - case NULL: - if (fieldString == null || fieldString.isEmpty() || fieldString.equals("null")) { - return null; - } else { - throw new IllegalArgumentException("Cannot parse " + fieldString + " as null"); - } - case INT: - return Integer.parseInt(fieldString); - case LONG: - return Long.parseLong(fieldString); - case FLOAT: - return Float.parseFloat(fieldString); - case DOUBLE: - return Double.parseDouble(fieldString); - case BOOLEAN: - return Boolean.parseBoolean(fieldString); - case STRING: - return fieldString; - case ENUM: - return parseEnum(schema, fieldString); - case BYTES: - return parseBytes(fieldString); - default: - throw new IllegalArgumentException("Cannot handle scalar schema of 
type " - + schema.getType() + " in " + headers); - } - } - - private Map parseMap(String[] rawValues, Schema schema, - HeaderHierarchy headers) { - Map children = headers.getChildren(); - Map map = new LinkedHashMap<>(children.size() * 4 / 3); - for (HeaderHierarchy child : children.values()) { - map.put(child.getName(), parseValue(rawValues, schema.getValueType(), child)); - } - return map; - } - - private static ByteBuffer parseBytes(String fieldString) { - byte[] result = Base64.getDecoder() - .decode(fieldString.getBytes(StandardCharsets.UTF_8)); - return ByteBuffer.wrap(result); - } - - private Object parseUnion(String[] rawValues, Schema schema, HeaderHierarchy headers) { - for (Schema subschema : schema.getTypes()) { - try { - return parseValue(rawValues, subschema, headers); - } catch (IllegalArgumentException ex) { - // skip bad union member - } - } - throw new IllegalArgumentException("Cannot handle union types " - + schema.getTypes() + " in " + headers); - } - - private List parseArray(String[] rawValues, Schema schema, HeaderHierarchy headers) { - Map children = headers.getChildren(); - int arrayLength = children.keySet().stream() - .mapToInt(headerName -> Integer.parseInt(headerName) + 1) - .max() - .orElse(0); - - GenericData.Array array = new GenericData.Array<>(arrayLength, schema); - for (int i = 0; i < arrayLength; i++) { - HeaderHierarchy child = children.get(String.valueOf(i)); - if (child != null) { - array.add(i, parseValue(rawValues, schema.getElementType(), child)); - } else { - array.add(i, null); - } - } - return array; - } - - private static GenericData.EnumSymbol parseEnum(Schema schema, String fieldString) { - return new GenericData.EnumSymbol(schema, fieldString); - } - - @Override - public void close() throws IOException { - csvReader.close(); - bufferedReader.close(); - } - - @Override - public String toString() { - return "MockCsvParser{" + "topic=" + topic + '}'; - } -} diff --git 
a/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockCsvParser.kt b/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockCsvParser.kt new file mode 100644 index 00000000..76db8e72 --- /dev/null +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockCsvParser.kt @@ -0,0 +1,281 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.mock.data + +import com.opencsv.CSVReader +import com.opencsv.exceptions.CsvValidationException +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.avro.generic.GenericRecord +import org.apache.avro.generic.GenericRecordBuilder +import org.apache.avro.specific.SpecificRecord +import org.radarbase.data.Record +import org.radarbase.mock.config.MockDataConfig +import org.radarbase.producer.schema.SchemaRetriever +import org.radarbase.topic.AvroTopic +import org.radarbase.topic.AvroTopic.Companion.parseSpecificRecord +import java.io.BufferedReader +import java.io.Closeable +import java.io.IOException +import java.nio.ByteBuffer +import java.nio.charset.StandardCharsets +import java.nio.file.Path +import java.time.Duration +import java.time.Instant +import java.util.* +import kotlin.io.path.bufferedReader + +/** + * Parse mock data from a CSV file. + * + * @param config configuration of the stream. + * @param root parent directory of the data file. 
+ * @param retriever schema retriever to fetch schema with if none is supplied. + * @throws IllegalArgumentException if the second row has the wrong number of columns + */ +class MockCsvParser constructor( + private val config: MockDataConfig, + root: Path?, + private val startTime: Instant, + private val retriever: SchemaRetriever, +) : Closeable { + lateinit var topic: AvroTopic + private val csvReader: CSVReader + private val bufferedReader: BufferedReader + private val rowDuration: Duration = Duration.ofMillis((1.0 / config.frequency).toLong()) + private val headers: HeaderHierarchy + private var currentLine: Array? + private var row: Int = 0 + private var rowTime: Long = this.startTime.toEpochMilli() + + init { + bufferedReader = config.getDataFile(root).bufferedReader() + csvReader = CSVReader(bufferedReader) + headers = HeaderHierarchy() + val header = csvReader.readNext() + for (i in header.indices) { + headers.add( + i, + header[i].split("\\.".toRegex()).dropLastWhile { it.isEmpty() }, + ) + } + currentLine = csvReader.readNext() + } + + suspend fun initialize() { + val (keySchema, valueSchema) = try { + val specificTopic = config.parseAvroTopic() + Pair(specificTopic.keySchema, specificTopic.valueSchema) + } catch (ex: IllegalStateException) { + Pair( + parseSpecificRecord(config.keySchema).schema, + retriever.getByVersion(config.topic, true, 0).schema, + ) + } + + topic = AvroTopic( + config.topic, + keySchema, + valueSchema, + GenericRecord::class.java, + GenericRecord::class.java, + ) + } + + /** + * Read the next record in the file. 
+ * @throws NullPointerException if a field from the Avro schema is missing as a column + * @throws IllegalArgumentException if the row has the wrong number of columns + * @throws IllegalStateException if a next row is not available + * @throws IOException if the next row could not be read + */ + @Throws(IOException::class, CsvValidationException::class) + operator fun next(): Record { + check(hasNext()) { "No next record available" } + val key = parseRecord( + currentLine, + topic.keySchema, + checkNotNull(headers.children["key"]) { "Missing key fields" }, + ) + val value = parseRecord( + currentLine, + topic.valueSchema, + checkNotNull(headers.children["value"]) { "Missing value fields" }, + ) + incrementRow() + return Record(key, value) + } + + @Throws(CsvValidationException::class, IOException::class) + private fun incrementRow() { + currentLine = csvReader.readNext() + row++ + rowTime = startTime + .plus(rowDuration.multipliedBy(row.toLong())) + .toEpochMilli() + } + + /** + * Whether there is a next record in the file. + */ + operator fun hasNext(): Boolean { + return currentLine != null + } + + private fun parseRecord( + rawValues: Array?, + schema: Schema, + headers: HeaderHierarchy, + ): GenericRecord { + val record = GenericRecordBuilder(schema) + val children = headers.children + for (field in schema.fields) { + val child = children[field.name()] + if (child != null) { + record[field] = parseValue(rawValues, field.schema(), child) + } + } + return record.build() + } + + /** Parse value from Schema. */ + fun parseValue(rawValues: Array?, schema: Schema, headers: HeaderHierarchy): Any? 
{ + return when (schema.type) { + Schema.Type.NULL, Schema.Type.INT, Schema.Type.LONG, Schema.Type.FLOAT, Schema.Type.DOUBLE, Schema.Type.BOOLEAN, Schema.Type.STRING, Schema.Type.ENUM, Schema.Type.BYTES -> parseScalar( + rawValues, + schema, + headers, + ) + + Schema.Type.UNION -> parseUnion(rawValues, schema, headers) + Schema.Type.RECORD -> parseRecord(rawValues, schema, headers) + Schema.Type.ARRAY -> parseArray(rawValues, schema, headers) + Schema.Type.MAP -> parseMap(rawValues, schema, headers) + else -> throw IllegalArgumentException( + "Cannot handle schemas of type " + + schema.type + " in " + headers, + ) + } + } + + private fun parseScalar( + rawValues: Array?, + schema: Schema, + headers: HeaderHierarchy, + ): Any? { + val fieldHeader = headers.index + require(fieldHeader < rawValues!!.size) { "Row is missing value for " + headers.name } + val fieldString = rawValues[fieldHeader] + .replace("\${timeSeconds}", java.lang.Double.toString(rowTime / 1000.0)) + .replace("\${timeMillis}", java.lang.Long.toString(rowTime)) + return parseScalar(fieldString, schema, headers) + } + + private fun parseMap( + rawValues: Array?, + schema: Schema, + headers: HeaderHierarchy, + ): Map = buildMap { + for (child in headers.children.values) { + put(child.name!!, parseValue(rawValues, schema.valueType, child)) + } + } + + private fun parseUnion( + rawValues: Array?, + schema: Schema, + headers: HeaderHierarchy, + ): Any = requireNotNull( + schema.types.firstNotNullOfOrNull { subSchema -> + try { + parseValue(rawValues, subSchema, headers) + } catch (ex: IllegalArgumentException) { + // skip bad union member + null + } + }, + ) { "Cannot handle union types ${schema.types} in $headers" } + + private fun parseArray( + rawValues: Array?, + schema: Schema, + headers: HeaderHierarchy, + ): List { + val children = headers.children + val arrayLength = children.keys.stream() + .mapToInt { headerName: String -> headerName.toInt() + 1 } + .max() + .orElse(0) + val array = 
GenericData.Array(arrayLength, schema) + for (i in 0 until arrayLength) { + val child = children[i.toString()] + if (child != null) { + array.add(i, parseValue(rawValues, schema.elementType, child)) + } else { + array.add(i, null) + } + } + return array + } + + @Throws(IOException::class) + override fun close() { + csvReader.close() + bufferedReader.close() + } + + override fun toString(): String { + return "MockCsvParser{topic=$topic}" + } + + companion object { + private fun parseScalar( + fieldString: String?, + schema: Schema, + headers: HeaderHierarchy, + ): Any? { + return when (schema.type) { + Schema.Type.NULL -> if (fieldString.isNullOrEmpty() || fieldString == "null") { + null + } else { + throw IllegalArgumentException("Cannot parse $fieldString as null") + } + Schema.Type.INT -> fieldString!!.toInt() + Schema.Type.LONG -> fieldString!!.toLong() + Schema.Type.FLOAT -> fieldString!!.toFloat() + Schema.Type.DOUBLE -> fieldString!!.toDouble() + Schema.Type.BOOLEAN -> java.lang.Boolean.parseBoolean(fieldString) + Schema.Type.STRING -> fieldString + Schema.Type.ENUM -> parseEnum(schema, fieldString) + Schema.Type.BYTES -> parseBytes(fieldString) + else -> throw IllegalArgumentException( + "Cannot handle scalar schema of type " + + schema.type + " in " + headers, + ) + } + } + + private fun parseBytes(fieldString: String?): ByteBuffer { + val result = Base64.getDecoder() + .decode(fieldString!!.toByteArray(StandardCharsets.UTF_8)) + return ByteBuffer.wrap(result) + } + + private fun parseEnum(schema: Schema, fieldString: String?): GenericData.EnumSymbol { + return GenericData.EnumSymbol(schema, fieldString) + } + } +} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockRecordValidator.java b/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockRecordValidator.java deleted file mode 100644 index 55886bde..00000000 --- a/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockRecordValidator.java +++ /dev/null @@ 
-1,144 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.mock.data; - -import com.opencsv.exceptions.CsvValidationException; -import java.io.IOException; -import java.nio.file.Path; -import java.time.Instant; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Field; -import org.apache.avro.generic.GenericRecord; -import org.radarbase.data.Record; -import org.radarbase.mock.config.MockDataConfig; -import org.radarbase.producer.rest.SchemaRetriever; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * CSV files must be validated before using since MockAggregator can handle only files containing - * unique User_ID and Source_ID and having increasing timestamp at each raw. - */ -public class MockRecordValidator { - private static final Logger logger = LoggerFactory.getLogger(MockRecordValidator.class); - private final MockDataConfig config; - private final long duration; - private final Path root; - private int timePos; - private double time; - private double startTime; - private final SchemaRetriever retriever; - - /** Create a new validator for given configuration. 
*/ - public MockRecordValidator(MockDataConfig config, long duration, Path root, - SchemaRetriever retriever) { - this.config = config; - this.duration = duration; - this.root = root; - this.retriever = retriever; - this.time = Double.NaN; - this.startTime = Double.NaN; - } - - /** - * Verify whether the CSV file can be used or not. - * @throws IllegalArgumentException if the CSV file does not respect the constraints. - */ - public void validate() { - Instant now = Instant.now(); - try (MockCsvParser parser = new MockCsvParser(config, root, now, retriever)) { - if (!parser.hasNext()) { - throw new IllegalArgumentException("CSV file is empty"); - } - - Schema valueSchema = config.parseAvroTopic().getValueSchema(); - Field timeField = valueSchema.getField("timeReceived"); - if (timeField == null) { - timeField = valueSchema.getField("time"); - } - timePos = timeField.pos(); - - Record last = null; - long line = 1L; - - while (parser.hasNext()) { - Record record = parser.next(); - checkRecord(record, last, line++); - last = record; - } - - checkDuration(); - checkFrequency(line); - } catch (IOException | CsvValidationException e) { - error("Cannot open file", -1, e); - } - } - - private void checkFrequency(long line) { - long expected = config.getFrequency() * duration / 1000L + 1L; - if (line != config.getFrequency() * duration / 1000L + 1L) { - error("CSV contains fewer messages " + line + " than expected " + expected, -1L, null); - } - } - - private void checkRecord(Record record, - Record last, long line) { - double previousTime = time; - time = (Double) record.value.get(timePos); - - if (last == null) { - // no checks, only update initial time stamp - startTime = time; - } else if (!last.key.equals(record.key)) { - error("It is possible to test only one user/source at time.", line, null); - } else if (time < previousTime) { - error("Time must increase row by row.", line, null); - } - } - - private void error(String message, long line, Exception ex) { - 
StringBuilder messageBuilder = new StringBuilder(150); - messageBuilder - .append(config.getDataFile()) - .append(" with topic ") - .append(config.getTopic()) - .append(" is invalid"); - if (line > 0L) { - messageBuilder - .append(" on line ") - .append(line); - } - String fullMessage = messageBuilder - .append(". ") - .append(message) - .toString(); - logger.error(fullMessage); - throw new IllegalArgumentException(fullMessage, ex); - } - - private void checkDuration() { - long interval = (long)(time * 1000d) - (long)(startTime * 1000d); - - // add a margin of 50 for clock error purposes - long margin = 50L; - - if (duration <= interval - margin || duration > interval + 1000L + margin) { - error("Data does not cover " + duration + " milliseconds but " - + interval + " instead.", -1L, null); - } - } -} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockRecordValidator.kt b/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockRecordValidator.kt new file mode 100644 index 00000000..de15f9a9 --- /dev/null +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/data/MockRecordValidator.kt @@ -0,0 +1,147 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.radarbase.mock.data + +import com.opencsv.exceptions.CsvValidationException +import org.apache.avro.generic.GenericRecord +import org.apache.avro.specific.SpecificRecord +import org.radarbase.data.Record +import org.radarbase.mock.config.MockDataConfig +import org.radarbase.producer.schema.SchemaRetriever +import org.slf4j.LoggerFactory +import java.io.IOException +import java.nio.file.Path +import java.time.Instant + +/** + * CSV files must be validated before using since MockAggregator can handle only files containing + * unique User_ID and Source_ID and having increasing timestamp at each raw. + */ +class MockRecordValidator( + private val config: MockDataConfig, + private val duration: Long, + private val root: Path, + private val retriever: SchemaRetriever, +) { + private var timePos = 0 + private var time: Double + private var startTime: Double + + /** Create a new validator for given configuration. */ + init { + time = Double.NaN + startTime = Double.NaN + } + + /** + * Verify whether the CSV file can be used or not. + * @throws IllegalArgumentException if the CSV file does not respect the constraints. + */ + suspend fun validate() { + val now = Instant.now() + try { + MockCsvParser(config, root, now, retriever).use { parser -> + parser.initialize() + require(parser.hasNext()) { "CSV file is empty" } + val valueSchema = + config.parseAvroTopic().valueSchema + var timeField = valueSchema.getField("timeReceived") + if (timeField == null) { + timeField = valueSchema.getField("time") + } + timePos = timeField!!.pos() + var last: Record? 
= null + var line = 1L + while (parser.hasNext()) { + val record = parser.next() + checkRecord(record, last, line++) + last = record + } + checkDuration() + checkFrequency(line) + } + } catch (e: IOException) { + error("Cannot open file", -1, e) + } catch (e: CsvValidationException) { + error("Cannot open file", -1, e) + } + } + + private fun checkFrequency(line: Long) { + val expected = config.frequency * duration / 1000L + 1L + if (line != config.frequency * duration / 1000L + 1L) { + error("CSV contains fewer messages $line than expected $expected", -1L, null) + } + } + + private fun checkRecord( + record: Record, + last: Record?, + line: Long, + ) { + val previousTime = time + time = record.value[timePos] as Double + if (last == null) { + // no checks, only update initial time stamp + startTime = time + } else if (last.key != record.key) { + error("It is possible to test only one user/source at time.", line, null) + } else if (time < previousTime) { + error("Time must increase row by row.", line, null) + } + } + + private fun error(message: String, line: Long, ex: Exception?) { + val messageBuilder = StringBuilder(150) + messageBuilder + .append(config.dataFile) + .append(" with topic ") + .append(config.topic) + .append(" is invalid") + if (line > 0L) { + messageBuilder + .append(" on line ") + .append(line) + } + val fullMessage = messageBuilder + .append(". 
") + .append(message) + .toString() + logger.error(fullMessage) + throw IllegalArgumentException(fullMessage, ex) + } + + private fun checkDuration() { + val interval = (time * 1000.0).toLong() - (startTime * 1000.0).toLong() + + // add a margin of 50 for clock error purposes + val margin = 50L + if (duration <= interval - margin || duration > interval + 1000L + margin) { + error( + "Data does not cover " + duration + " milliseconds but " + + interval + " instead.", + -1L, + null, + ) + } + } + + companion object { + private val logger = LoggerFactory.getLogger( + MockRecordValidator::class.java, + ) + } +} diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/data/RecordGenerator.java b/radar-commons-testing/src/main/java/org/radarbase/mock/data/RecordGenerator.java index 60f4cddf..2bdc0b72 100644 --- a/radar-commons-testing/src/main/java/org/radarbase/mock/data/RecordGenerator.java +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/data/RecordGenerator.java @@ -191,15 +191,15 @@ public boolean hasNext() { public String[] next() { Record record = baseIterator.next(); - int keyFieldsSize = record.key.getSchema().getFields().size(); - int valueFieldsSize = record.value.getSchema().getFields().size(); + int keyFieldsSize = record.getKey().getSchema().getFields().size(); + int valueFieldsSize = record.getValue().getSchema().getFields().size(); String[] result = new String[keyFieldsSize + valueFieldsSize]; for (int i = 0; i < keyFieldsSize; i++) { - result[i] = record.key.get(i).toString(); + result[i] = record.getKey().get(i).toString(); } for (int i = 0; i < valueFieldsSize; i++) { - result[i + keyFieldsSize] = record.value.get(i).toString(); + result[i + keyFieldsSize] = record.getValue().get(i).toString(); } return result; } diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/model/ExpectedValue.java b/radar-commons-testing/src/main/java/org/radarbase/mock/model/ExpectedValue.java index dae02afc..233202b5 100644 --- 
a/radar-commons-testing/src/main/java/org/radarbase/mock/model/ExpectedValue.java +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/model/ExpectedValue.java @@ -70,12 +70,12 @@ public void add(Record record) { if (timeReceivedPos == -1) { throw new IllegalStateException("Cannot parse record without a schema."); } - long timeMillis = (long) ((Double) record.value.get(timeReceivedPos) * 1000d); + long timeMillis = (long) ((Double) record.getValue().get(timeReceivedPos) * 1000d); if (timeMillis >= lastTimestamp + DURATION || lastCollector == null) { lastTimestamp = timeMillis - (timeMillis % DURATION); lastCollector = createCollector(); getSeries().put(lastTimestamp, lastCollector); } - lastCollector.add(record.value); + lastCollector.add(record.getValue()); } } diff --git a/radar-commons-testing/src/main/java/org/radarbase/mock/model/MockAggregator.java b/radar-commons-testing/src/main/java/org/radarbase/mock/model/MockAggregator.java index 933884b3..a4d6d27c 100644 --- a/radar-commons-testing/src/main/java/org/radarbase/mock/model/MockAggregator.java +++ b/radar-commons-testing/src/main/java/org/radarbase/mock/model/MockAggregator.java @@ -26,7 +26,7 @@ import org.apache.avro.Schema; import org.radarbase.mock.config.MockDataConfig; import org.radarbase.mock.data.MockCsvParser; -import org.radarbase.producer.rest.SchemaRetriever; +import org.radarbase.producer.schema.SchemaRetriever; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/radar-commons-testing/src/main/java/org/radarbase/util/Oscilloscope.java b/radar-commons-testing/src/main/java/org/radarbase/util/Oscilloscope.java deleted file mode 100644 index 28492cd2..00000000 --- a/radar-commons-testing/src/main/java/org/radarbase/util/Oscilloscope.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.util; - -/** - * Oscilloscope gives out a regular beat, at a given frequency per second. The intended way to use - * this is with a do-while loop, with the {@link #beat()} retrieved at the start of the loop, and - * {@link #willRestart()} in the condition of the loop. - */ -public class Oscilloscope { - private final int frequency; - private final long timeStep; - - private int iteration; - private long baseTime; - - /** - * Frequency which will give beat at given frequency per second. - * @param frequency number of beats per second. - */ - public Oscilloscope(int frequency) { - this.frequency = frequency; - this.baseTime = System.nanoTime(); - this.timeStep = 1_000_000_000L / this.frequency; - this.iteration = 0; - } - - /** Restart the oscilloscope at zero. */ - public void reset() { - this.baseTime = System.nanoTime(); - this.iteration = 0; - } - - /** Whether the next beat will restart at one. */ - public boolean willRestart() { - return iteration % frequency == 0; - } - - /** - * One oscilloscope beat, sleeping if necessary to not exceed the frequency per second. The beat - * number starts at one, increases to the frequency, and then goes to one again. - * @return one up to the given frequency - * @throws InterruptedException when the sleep was interrupted. 
- */ - public int beat() throws InterruptedException { - long currentTime = System.nanoTime(); - long nextSend = baseTime + iteration * timeStep; - if (currentTime < nextSend) { - long timeToSleep = nextSend - currentTime; - Thread.sleep(timeToSleep / 1_000_000L, ((int) timeToSleep) % 1_000_000); - } - int beatNumber = iteration % frequency + 1; - iteration++; - return beatNumber; - } -} diff --git a/radar-commons-testing/src/main/java/org/radarbase/util/Oscilloscope.kt b/radar-commons-testing/src/main/java/org/radarbase/util/Oscilloscope.kt new file mode 100644 index 00000000..0e4473fb --- /dev/null +++ b/radar-commons-testing/src/main/java/org/radarbase/util/Oscilloscope.kt @@ -0,0 +1,63 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.util + +import kotlinx.coroutines.delay +import org.slf4j.LoggerFactory +import java.util.concurrent.atomic.AtomicInteger +import kotlin.time.Duration +import kotlin.time.Duration.Companion.nanoseconds +import kotlin.time.Duration.Companion.seconds + +/** + * Oscilloscope gives out a regular beat, at a given frequency per second. The intended way to use + * this is with a do-while loop, with the [.beat] retrieved at the start of the loop, and + * [.willRestart] in the condition of the loop. 
+ */ +class Oscilloscope( + private val frequency: Int, +) { + private val timeStep: Duration = 1.seconds / frequency + private val baseTime: Long = System.nanoTime() + private var iteration: AtomicInteger = AtomicInteger(0) + + /** Whether the next beat will restart at one. */ + fun willRestart(): Boolean { + return iteration.get() % frequency == 0 + } + + /** + * One oscilloscope beat, sleeping if necessary to not exceed the frequency per second. The beat + * number starts at one, increases to the frequency, and then goes to one again. + * @return one up to the given frequency + * @throws InterruptedException when the sleep was interrupted. + */ + @Throws(InterruptedException::class) + suspend fun beat(): Int { + val currentTime = System.nanoTime() + val currentIteration = iteration.getAndIncrement() + val timeToSleep = (baseTime - currentTime).nanoseconds + timeStep * currentIteration + if (timeToSleep > Duration.ZERO) { + logger.info("delaying {} millis", timeToSleep.inWholeMilliseconds) + delay(timeToSleep) + } + return currentIteration % frequency + 1 + } + + companion object { + private val logger = LoggerFactory.getLogger(Oscilloscope::class.java) + } +} diff --git a/radar-commons-testing/src/test/java/org/radarbase/mock/CsvGeneratorTest.java b/radar-commons-testing/src/test/java/org/radarbase/mock/CsvGeneratorTest.java index d0aaba2f..9420f175 100644 --- a/radar-commons-testing/src/test/java/org/radarbase/mock/CsvGeneratorTest.java +++ b/radar-commons-testing/src/test/java/org/radarbase/mock/CsvGeneratorTest.java @@ -16,9 +16,9 @@ package org.radarbase.mock; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import com.opencsv.CSVReader; import 
com.opencsv.exceptions.CsvValidationException; @@ -28,29 +28,25 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.radarbase.mock.data.MockRecordValidatorTest; -import org.radarcns.kafka.ObservationKey; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.radarbase.mock.config.MockDataConfig; import org.radarbase.mock.data.CsvGenerator; +import org.radarbase.mock.data.MockRecordValidatorTest; import org.radarbase.mock.data.RecordGenerator; +import org.radarcns.kafka.ObservationKey; public class CsvGeneratorTest { - @Rule - public TemporaryFolder folder = new TemporaryFolder(); - - private MockDataConfig makeConfig() throws IOException { + private MockDataConfig makeConfig(Path folder) throws IOException { return MockRecordValidatorTest.makeConfig(folder); } @Test - public void generateMockConfig() throws IOException, CsvValidationException { + public void generateMockConfig(@TempDir Path folder) throws IOException, CsvValidationException { CsvGenerator generator = new CsvGenerator(); - MockDataConfig config = makeConfig(); - generator.generate(config, 100_000L, folder.getRoot().toPath()); + MockDataConfig config = makeConfig(folder); + generator.generate(config, 100_000L, folder.getRoot()); Path p = Paths.get(config.getDataFile()); try (Reader reader = Files.newBufferedReader(p); @@ -74,10 +70,10 @@ public void generateMockConfig() throws IOException, CsvValidationException { } @Test - public void generateGenerator() throws IOException, CsvValidationException { + public void generateGenerator(@TempDir Path folder) throws IOException, CsvValidationException { CsvGenerator generator = new CsvGenerator(); - MockDataConfig config = makeConfig(); + MockDataConfig config = makeConfig(folder); final String time = Double.toString(System.currentTimeMillis() / 1000d); diff --git 
a/radar-commons-testing/src/test/java/org/radarbase/mock/RecordGeneratorTest.java b/radar-commons-testing/src/test/java/org/radarbase/mock/RecordGeneratorTest.java index 783b39d4..d392586b 100644 --- a/radar-commons-testing/src/test/java/org/radarbase/mock/RecordGeneratorTest.java +++ b/radar-commons-testing/src/test/java/org/radarbase/mock/RecordGeneratorTest.java @@ -16,17 +16,17 @@ package org.radarbase.mock; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Arrays; import java.util.Iterator; import org.apache.avro.specific.SpecificRecord; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.radarbase.data.Record; -import org.radarcns.kafka.ObservationKey; import org.radarbase.mock.config.MockDataConfig; import org.radarbase.mock.data.RecordGenerator; +import org.radarcns.kafka.ObservationKey; import org.radarcns.passive.empatica.EmpaticaE4Acceleration; /** @@ -35,7 +35,7 @@ public class RecordGeneratorTest { @Test - public void generate() throws Exception { + public void generate() { MockDataConfig config = new MockDataConfig(); config.setTopic("test"); config.setFrequency(10); @@ -49,23 +49,23 @@ public void generate() throws Exception { Iterator> iter = generator .iterateValues(new ObservationKey("test", "a", "b"), 0); Record record = iter.next(); - assertEquals(new ObservationKey("test", "a", "b"), record.key); - float x = ((EmpaticaE4Acceleration)record.value).getX(); + assertEquals(new ObservationKey("test", "a", "b"), record.getKey()); + float x = ((EmpaticaE4Acceleration)record.getValue()).getX(); assertTrue(x >= 0.1f && x < 9.9f); - float y = ((EmpaticaE4Acceleration)record.value).getX(); + float y = ((EmpaticaE4Acceleration)record.getValue()).getX(); assertTrue(y >= 0.1f && y < 9.9f); - float z = ((EmpaticaE4Acceleration)record.value).getX(); + float z = 
((EmpaticaE4Acceleration)record.getValue()).getX(); assertTrue(z >= 0.1f && z < 9.9f); - double time = ((EmpaticaE4Acceleration)record.value).getTime(); + double time = ((EmpaticaE4Acceleration)record.getValue()).getTime(); assertTrue(time > System.currentTimeMillis() / 1000d - 1d && time <= System.currentTimeMillis() / 1000d); Record nextRecord = iter.next(); - assertEquals(time + 0.1d, (Double)nextRecord.value.get(0), 1e-6); + assertEquals(time + 0.1d, (Double)nextRecord.getValue().get(0), 1e-6); } @Test - public void getHeaders() throws Exception { + public void getHeaders() { MockDataConfig config = new MockDataConfig(); config.setTopic("test"); config.setValueSchema(EmpaticaE4Acceleration.class.getName()); diff --git a/radar-commons-testing/src/test/java/org/radarbase/mock/data/MockRecordValidatorTest.java b/radar-commons-testing/src/test/java/org/radarbase/mock/data/MockRecordValidatorTest.java deleted file mode 100644 index a3a3569e..00000000 --- a/radar-commons-testing/src/test/java/org/radarbase/mock/data/MockRecordValidatorTest.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.mock.data; - -import static org.junit.Assert.assertThrows; - -import java.io.IOException; -import java.io.Writer; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Arrays; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.radarbase.mock.config.MockDataConfig; -import org.radarbase.producer.rest.SchemaRetriever; -import org.radarcns.monitor.application.ApplicationServerStatus; -import org.radarcns.kafka.ObservationKey; -import org.radarcns.passive.phone.PhoneAcceleration; -import org.radarcns.passive.phone.PhoneLight; - -public class MockRecordValidatorTest { - @Rule - public TemporaryFolder folder = new TemporaryFolder(); - - private Path root; - private SchemaRetriever retriever; - - private MockDataConfig makeConfig() throws IOException { - return makeConfig(folder); - } - - @Before - public void setUp() { - root = folder.getRoot().toPath(); - retriever = null; - } - - public static MockDataConfig makeConfig(TemporaryFolder folder) throws IOException { - MockDataConfig config = new MockDataConfig(); - config.setKeySchema(ObservationKey.class.getName()); - config.setDataFile(folder.newFile().getAbsolutePath()); - config.setValueSchema(PhoneLight.class.getName()); - config.setValueField("light"); - config.setTopic("test"); - return config; - } - - @Test - public void validateEnum() throws IOException { - MockDataConfig config = makeConfig(); - config.setValueSchema(ApplicationServerStatus.class.getName()); - - try (Writer writer = Files.newBufferedWriter(config.getDataFile(root))) { - writer.append("key.projectId,key.userId,key.sourceId,value.time,value.serverStatus,value.ipAddress\n"); - writer.append("test,a,b,1,UNKNOWN,\n"); - writer.append("test,a,b,2,CONNECTED,\n"); - } - - new MockRecordValidator(config, 2_000L, root, retriever).validate(); - } - - - @Test - public void validateEnumGenerated() throws IOException { - 
MockDataConfig config = makeConfig(); - config.setValueSchema(ApplicationServerStatus.class.getName()); - config.setValueField("serverStatus"); - CsvGenerator generator = new CsvGenerator(); - generator.generate(config, 2_000L, root); - new MockRecordValidator(config, 2_000L, root, retriever).validate(); - } - - @Test - public void validate() throws Exception { - CsvGenerator generator = new CsvGenerator(); - - MockDataConfig config = makeConfig(); - generator.generate(config, 100_000L, root); - - new MockRecordValidator(config, 100_000L, root, retriever).validate(); - } - - @Test - public void validateWrongDuration() throws Exception { - CsvGenerator generator = new CsvGenerator(); - - MockDataConfig config = makeConfig(); - generator.generate(config, 100_000L, root); - - assertValidateThrows(IllegalArgumentException.class, config); - } - - @Test - public void validateCustom() throws Exception { - MockDataConfig config = makeConfig(); - - try (Writer writer = Files.newBufferedWriter(config.getDataFile(root))) { - writer.append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n"); - writer.append("test,a,b,1,1,1\n"); - writer.append("test,a,b,1,2,1\n"); - } - - assertValidate(config); - } - - @Test - public void validateWrongKey() throws Exception { - MockDataConfig config = makeConfig(); - - try (Writer writer = Files.newBufferedWriter(config.getDataFile(root))) { - writer.append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n"); - writer.append("test,a,b,1,1,1\n"); - writer.append("test,a,c,1,2,1\n"); - } - - assertValidateThrows(IllegalArgumentException.class, config); - } - - @Test - public void validateWrongTime() throws Exception { - MockDataConfig config = makeConfig(); - - try (Writer writer = Files.newBufferedWriter(config.getDataFile(root))) { - writer.append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n"); - writer.append("test,a,b,1,1,1\n"); - 
writer.append("test,a,b,1,0,1\n"); - } - - assertValidateThrows(IllegalArgumentException.class, config); - } - - - @Test - public void validateMissingKeyField() throws Exception { - MockDataConfig config = makeConfig(); - - try (Writer writer = Files.newBufferedWriter(config.getDataFile(root))) { - writer.append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n"); - writer.append("test,a,1,1,1\n"); - writer.append("test,a,1,2,1\n"); - } - - assertValidateThrows(IllegalArgumentException.class, config); - } - - @Test - public void validateMissingValueField() throws Exception { - MockDataConfig config = makeConfig(); - - try (Writer writer = Files.newBufferedWriter(config.getDataFile(root))) { - writer.append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n"); - writer.append("test,a,b,1,1\n"); - writer.append("test,a,b,1,2\n"); - } - - assertValidateThrows(IllegalArgumentException.class, config); - } - - @Test - public void validateMissingValue() throws Exception { - MockDataConfig config = makeConfig(); - - try (Writer writer = Files.newBufferedWriter(config.getDataFile(root))) { - writer.append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n"); - writer.append("test,a,b,1,1\n"); - writer.append("test,a,b,1,2,1\n"); - } - - assertValidateThrows(IllegalArgumentException.class, config); - } - - @Test - public void validateWrongValueType() throws Exception { - MockDataConfig config = makeConfig(); - - try (Writer writer = Files.newBufferedWriter(config.getDataFile(root))) { - writer.append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n"); - writer.append("test,a,b,1,1,a\n"); - writer.append("test,a,b,1,2,b\n"); - } - - assertValidateThrows(NumberFormatException.class, config); - } - - @Test - public void validateMultipleFields() throws Exception { - MockDataConfig config = makeConfig(); - 
config.setValueSchema(PhoneAcceleration.class.getName()); - config.setValueFields(Arrays.asList("x", "y", "z")); - - try (Writer writer = Files.newBufferedWriter(config.getDataFile(root))) { - writer.append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.x,value.y,value.z\n"); - writer.append("test,a,b,1,1,1,1,1\n"); - writer.append("test,a,b,1,2,1,1,1\n"); - } - - assertValidate(config); - } - - private void assertValidateThrows(Class ex, MockDataConfig config) { - MockRecordValidator validator = new MockRecordValidator(config, 2_000L, root, retriever); - assertThrows(ex, validator::validate); - } - - private void assertValidate(MockDataConfig config) { - new MockRecordValidator(config, 2_000L, root, retriever).validate(); - } -} diff --git a/radar-commons-testing/src/test/java/org/radarbase/mock/data/MockRecordValidatorTest.kt b/radar-commons-testing/src/test/java/org/radarbase/mock/data/MockRecordValidatorTest.kt new file mode 100644 index 00000000..ce7b1cfa --- /dev/null +++ b/radar-commons-testing/src/test/java/org/radarbase/mock/data/MockRecordValidatorTest.kt @@ -0,0 +1,229 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.radarbase.mock.data + +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.ExperimentalCoroutinesApi +import kotlinx.coroutines.test.runTest +import kotlinx.coroutines.withContext +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.io.TempDir +import org.mockito.Mockito.mock +import org.radarbase.mock.config.MockDataConfig +import org.radarbase.producer.schema.SchemaRetriever +import org.radarcns.kafka.ObservationKey +import org.radarcns.monitor.application.ApplicationServerStatus +import org.radarcns.passive.phone.PhoneAcceleration +import org.radarcns.passive.phone.PhoneLight +import java.io.IOException +import java.io.Writer +import java.nio.file.Files +import java.nio.file.Path +import java.util.* +import kotlin.io.path.bufferedWriter + +@OptIn(ExperimentalCoroutinesApi::class) +class MockRecordValidatorTest { + @TempDir + lateinit var folder: Path + private lateinit var root: Path + private lateinit var retriever: SchemaRetriever + + @Throws(IOException::class) + private fun makeConfig(): MockDataConfig { + return makeConfig(folder) + } + + @BeforeEach + fun setUp(@TempDir folder: Path) { + root = folder.root + retriever = mock(SchemaRetriever::class.java) + } + + @Test + @Throws(IOException::class) + fun validateEnum() = runTest { + val config = makeConfig() + config.valueSchema = ApplicationServerStatus::class.java.name + withContext(Dispatchers.IO) { + config.getDataFile(root).bufferedWriter().use { writer -> + writer.append("key.projectId,key.userId,key.sourceId,value.time,value.serverStatus,value.ipAddress\n") + writer.append("test,a,b,1,UNKNOWN,\n") + writer.append("test,a,b,2,CONNECTED,\n") + } + } + MockRecordValidator(config, 2000L, root, retriever).validate() + } + + @Test + @Throws(IOException::class) + fun validateEnumGenerated() = runTest { + val config = makeConfig() + config.valueSchema = ApplicationServerStatus::class.java.name + 
config.setValueField("serverStatus") + val generator = CsvGenerator() + generator.generate(config, 2000L, root) + MockRecordValidator(config, 2000L, root, retriever).validate() + } + + @Test + @Throws(Exception::class) + fun validate() = runTest { + val generator = CsvGenerator() + val config = makeConfig() + generator.generate(config, 100000L, root) + MockRecordValidator(config, 100000L, root, retriever).validate() + } + + @Test + @Throws(Exception::class) + fun validateWrongDuration() = runTest { + val generator = CsvGenerator() + val config = makeConfig() + generator.generate(config, 100000L, root) + assertValidateThrows(config) + } + + @Test + @Throws(Exception::class) + fun validateCustom() = runTest { + val config = writeConfig { + append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n") + append("test,a,b,1,1,1\n") + append("test,a,b,1,2,1\n") + } + assertValidate(config) + } + + @Test + @Throws(Exception::class) + fun validateWrongKey() = runTest { + val config = writeConfig { + append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n") + append("test,a,b,1,1,1\n") + append("test,a,c,1,2,1\n") + } + assertValidateThrows(config) + } + + @Test + @Throws(Exception::class) + fun validateWrongTime() = runTest { + val config = writeConfig { + append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n") + append("test,a,b,1,1,1\n") + append("test,a,b,1,0,1\n") + } + assertValidateThrows(config) + } + + @Test + @Throws(Exception::class) + fun validateMissingKeyField() = runTest { + val config = writeConfig { + append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n") + append("test,a,1,1,1\n") + append("test,a,1,2,1\n") + } + assertValidateThrows(config) + } + + @Test + @Throws(Exception::class) + fun validateMissingValueField() = runTest { + val config = writeConfig { + 
append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n") + append("test,a,b,1,1\n") + append("test,a,b,1,2\n") + } + assertValidateThrows(config) + } + + @Test + @Throws(Exception::class) + fun validateMissingValue() = runTest { + val config = writeConfig { + append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n") + append("test,a,b,1,1\n") + append("test,a,b,1,2,1\n") + } + assertValidateThrows(config) + } + + @Test + @Throws(Exception::class) + fun validateWrongValueType() = runTest { + val config = writeConfig { + append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.light\n") + append("test,a,b,1,1,a\n") + append("test,a,b,1,2,b\n") + } + assertValidateThrows(config) + } + + @Test + @Throws(Exception::class) + fun validateMultipleFields() = runTest { + val config = writeConfig { + append("key.projectId,key.userId,key.sourceId,value.time,value.timeReceived,value.x,value.y,value.z\n") + append("test,a,b,1,1,1,1,1\n") + append("test,a,b,1,2,1,1,1\n") + } + config.valueSchema = PhoneAcceleration::class.java.name + config.valueFields = listOf("x", "y", "z") + assertValidate(config) + } + + private suspend inline fun assertValidateThrows(config: MockDataConfig) { + val validator = MockRecordValidator(config, 2000L, root, retriever) + try { + validator.validate() + throw AssertionError("No exception thrown (expected ${T::class.java})") + } catch (ex: Throwable) { + if (!ex.javaClass.isAssignableFrom(T::class.java)) { + throw AssertionError("Another exception than ${T::class.java} thrown", ex) + } + } + } + + private suspend fun writeConfig(write: Writer.() -> Unit): MockDataConfig { + val config = makeConfig() + withContext(Dispatchers.IO) { + config.getDataFile(root).bufferedWriter().use(write) + } + return config + } + + private suspend fun assertValidate(config: MockDataConfig) { + MockRecordValidator(config, 2000L, root, retriever).validate() + } + + companion object 
{ + @JvmStatic + @Throws(IOException::class) + fun makeConfig(folder: Path): MockDataConfig { + val config = MockDataConfig() + val dataFile = Files.createTempFile(folder, "datafile", ".csv") + config.keySchema = ObservationKey::class.java.name + config.dataFile = dataFile.toAbsolutePath().toString() + config.valueSchema = PhoneLight::class.java.name + config.setValueField("light") + config.topic = "test" + return config + } + } +} diff --git a/radar-commons-testing/src/test/java/org/radarbase/util/MetronomeTest.java b/radar-commons-testing/src/test/java/org/radarbase/util/MetronomeTest.java index 1a5f3343..024dbebe 100644 --- a/radar-commons-testing/src/test/java/org/radarbase/util/MetronomeTest.java +++ b/radar-commons-testing/src/test/java/org/radarbase/util/MetronomeTest.java @@ -20,11 +20,9 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; +import org.junit.jupiter.api.Test; public class MetronomeTest { private void check(Metronome it, long expectedMin) { diff --git a/radar-commons-testing/src/test/java/org/radarbase/util/OscilloscopeTest.java b/radar-commons-testing/src/test/java/org/radarbase/util/OscilloscopeTest.java deleted file mode 100644 index 3941f7de..00000000 --- a/radar-commons-testing/src/test/java/org/radarbase/util/OscilloscopeTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.util; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThan; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.core.Is.is; - -import org.junit.Test; - -public class OscilloscopeTest { - @Test - public void beat() throws Exception { - Oscilloscope oscilloscope = new Oscilloscope(128); - - long time = System.currentTimeMillis(); - int iteration = 1; - do { - int beat = oscilloscope.beat(); - assertThat(beat, is(iteration++)); - if (beat == 2) { - // time of one beat is about 1/128 seconds = 7.8125 milliseconds - long beatDiff = System.currentTimeMillis() - time; - assertThat(beatDiff, greaterThanOrEqualTo(7L)); - assertThat(beatDiff, lessThanOrEqualTo(14L)); - } - } while (!oscilloscope.willRestart()); - - // frequency must match - assertThat(iteration, is(129)); - // restarts every frequency, the willRestart function does not reset - assertThat(oscilloscope.willRestart(), is(true)); - // beat starts at 1 again - assertThat(oscilloscope.beat(), is(1)); - // total time, from one cycle to the next, is about 1 second - long cycleDiff = System.currentTimeMillis() - time; - assertThat(cycleDiff, greaterThanOrEqualTo(996L)); - assertThat(cycleDiff, lessThan(1020L)); - } -} diff --git a/radar-commons-testing/src/test/java/org/radarbase/util/OscilloscopeTest.kt b/radar-commons-testing/src/test/java/org/radarbase/util/OscilloscopeTest.kt new file mode 100644 index 00000000..6272b22a --- 
/dev/null +++ b/radar-commons-testing/src/test/java/org/radarbase/util/OscilloscopeTest.kt @@ -0,0 +1,52 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.util + +import kotlinx.coroutines.runBlocking +import org.hamcrest.MatcherAssert.assertThat +import org.hamcrest.Matchers.* +import org.junit.jupiter.api.Test + +class OscilloscopeTest { + @Test + @Throws(Exception::class) + fun beat() = runBlocking { + val oscilloscope = Oscilloscope(128) + val time = System.currentTimeMillis() + var iteration = 1 + do { + val beat = oscilloscope.beat() + assertThat(beat, `is`(iteration++)) + if (beat == 2) { + // time of one beat is about 1/128 seconds = 7.8125 milliseconds + val beatDiff = System.currentTimeMillis() - time + assertThat(beatDiff, greaterThanOrEqualTo(5L)) + assertThat(beatDiff, lessThanOrEqualTo(14L)) + } + } while (!oscilloscope.willRestart()) + + // frequency must match + assertThat(iteration, `is`(129)) + // restarts every frequency, the willRestart function does not reset + assertThat(oscilloscope.willRestart(), `is`(true)) + // beat starts at 1 again + assertThat(oscilloscope.beat(), `is`(1)) + // total time, from one cycle to the next, is about 1 second + val cycleDiff = System.currentTimeMillis() - time + assertThat(cycleDiff, greaterThanOrEqualTo(996L)) + assertThat(cycleDiff, lessThan(1020L)) + } +} diff --git a/radar-commons/build.gradle 
b/radar-commons/build.gradle deleted file mode 100644 index 4249a990..00000000 --- a/radar-commons/build.gradle +++ /dev/null @@ -1,39 +0,0 @@ -description = 'RADAR Common utilities library.' - -//---------------------------------------------------------------------------// -// Sources and classpath configurations // -//---------------------------------------------------------------------------// - -configurations { - implementation { - resolutionStrategy.cacheChangingModulesFor 0, 'SECONDS' - } -} - -// In this section you declare where to find the dependencies of your project -repositories { - maven { url 'https://jitpack.io' } -} - -// In this section you declare the dependencies for your production and test code -dependencies { - api (group: 'org.apache.avro', name: 'avro', version: avroVersion) - - // to implement producers and consumers - api group: 'com.squareup.okhttp3', name: 'okhttp', version: okhttpVersion - api group: 'org.json', name: 'json', version: orgJsonVersion - - // The production code uses the SLF4J logging API at compile time - implementation group: 'org.slf4j', name:'slf4j-api', version: slf4jVersion - - testImplementation(platform("com.fasterxml.jackson:jackson-bom:$jacksonVersion")) - testImplementation group: 'com.fasterxml.jackson.core' , name: 'jackson-databind' - testImplementation group: 'org.radarbase', name: 'radar-schemas-commons', version: radarSchemasVersion - testImplementation group: 'junit', name: 'junit', version: junitVersion - testImplementation group: 'org.mockito', name: 'mockito-core', version: mockitoVersion - testImplementation group: 'com.squareup.okhttp3', name: 'mockwebserver', version: okhttpVersion - testRuntimeOnly group: 'org.slf4j', name: 'slf4j-simple', version: slf4jVersion -} - -apply from: '../gradle/publishing.gradle' -apply from: '../gradle/codestyle.gradle' diff --git a/radar-commons/build.gradle.kts b/radar-commons/build.gradle.kts new file mode 100644 index 00000000..02d3db94 --- /dev/null +++ 
b/radar-commons/build.gradle.kts @@ -0,0 +1,38 @@ +plugins { + kotlin("plugin.serialization") +} + +description = "RADAR Common utilities library." + +// ---------------------------------------------------------------------------// +// Sources and classpath configurations // +// ---------------------------------------------------------------------------// + +// In this section you declare where to find the dependencies of your project +repositories { + maven(url = "https://jitpack.io") +} + +// In this section you declare the dependencies for your production and test code +dependencies { + api("org.apache.avro:avro:${Versions.avro}") + api(kotlin("reflect")) + + implementation(project(":radar-commons-kotlin")) + + api(platform("io.ktor:ktor-bom:${Versions.ktor}")) + api("io.ktor:ktor-client-core") + api("io.ktor:ktor-client-cio") + api("io.ktor:ktor-client-auth") + implementation("io.ktor:ktor-client-content-negotiation") + implementation("io.ktor:ktor-serialization-kotlinx-json") + + api("org.jetbrains.kotlinx:kotlinx-coroutines-core:${Versions.coroutines}") + + testImplementation(platform("com.fasterxml.jackson:jackson-bom:${Versions.jackson}")) + testImplementation("com.fasterxml.jackson.core:jackson-databind") + testImplementation("org.radarbase:radar-schemas-commons:${Versions.radarSchemas}") + testImplementation("org.mockito:mockito-core:${Versions.mockito}") + testImplementation("org.mockito.kotlin:mockito-kotlin:${Versions.mockitoKotlin}") + testImplementation("com.squareup.okhttp3:mockwebserver:${Versions.okhttp}") +} diff --git a/radar-commons/src/main/java/org/radarbase/config/ServerConfig.java b/radar-commons/src/main/java/org/radarbase/config/ServerConfig.java deleted file mode 100644 index 2050b376..00000000 --- a/radar-commons/src/main/java/org/radarbase/config/ServerConfig.java +++ /dev/null @@ -1,281 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may 
not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.config; - -import java.net.InetSocketAddress; -import java.net.MalformedURLException; -import java.net.Proxy; -import java.net.Proxy.Type; -import java.net.URL; -import java.util.List; -import java.util.Objects; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import okhttp3.HttpUrl; - -/** - * POJO representing a ServerConfig configuration. - */ -@SuppressWarnings("PMD.GodClass") -public class ServerConfig { - private static final Pattern URL_PATTERN = Pattern.compile( - "(?:(?\\w+)://)?(?[^:/]+)(?::(?\\d+))?(?/.*)?"); - - private String host; - private int port = -1; - private String protocol; - private String path = null; - private String proxyHost; - private int proxyPort = -1; - private boolean unsafe = false; - - /** Pojo initializer. */ - public ServerConfig() { - // POJO initializer - } - - /** Parses the config from a URL. */ - public ServerConfig(URL url) { - host = url.getHost(); - port = url.getPort(); - protocol = url.getProtocol(); - setPath(url.getFile()); - } - - /** Parses the config from a URL string. 
*/ - public ServerConfig(String urlString) throws MalformedURLException { - Matcher matcher = URL_PATTERN.matcher(urlString); - if (!matcher.matches()) { - throw new MalformedURLException("Cannot create URL from string " + urlString); - } - protocol = matcher.group("protocol"); - host = matcher.group("host"); - String portString = matcher.group("port"); - if (portString != null && !portString.isEmpty()) { - port = Integer.parseInt(portString); - } - setPath(matcher.group("path")); - } - - /** Get the path of the server as a string. This does not include proxyHost information. */ - public String getUrlString() { - StringBuilder builder = new StringBuilder(host.length() - + (path != null ? path.length() : 0) + 20); - appendUrlString(builder); - return builder.toString(); - } - - /** Get the path of the server as a string. This does not include proxyHost information. */ - private void appendUrlString(StringBuilder builder) { - if (protocol != null) { - builder.append(protocol).append("://"); - } - builder.append(host); - if (port != -1) { - builder.append(':').append(port); - } - if (path != null) { - builder.append(path); - } - } - - /** Get the paths of a list of servers, concatenated with commas. */ - public static String getPaths(List configList) { - StringBuilder builder = new StringBuilder(configList.size() * 40); - boolean first = true; - for (ServerConfig server : configList) { - if (first) { - first = false; - } else { - builder.append(','); - } - server.appendUrlString(builder); - } - return builder.toString(); - } - - /** - * Get the server as a URL. - * - * @return URL to the server. - * @throws IllegalStateException if the URL is invalid - */ - public URL getUrl() { - if (protocol == null || host == null) { - throw new IllegalStateException("Cannot create URL without protocol and host"); - } - try { - return new URL(protocol, host, port, path == null ? 
"" : path); - } catch (MalformedURLException ex) { - throw new IllegalStateException("Already parsed a URL but it turned out invalid", ex); - } - } - - /** - * Get the server as an HttpUrl. - * @return HttpUrl to the server - * @throws IllegalStateException if the URL is invalid - */ - public HttpUrl getHttpUrl() { - if (protocol == null) { - protocol = "http"; - } - return HttpUrl.get(getUrlString()); - } - - /** - * Get the HTTP proxyHost associated to given server. - * @return http proxyHost if specified, or null if none is specified. - * @throws IllegalStateException if proxyHost is set but proxyPort is not or if the server - * protocol is not HTTP(s) - */ - public Proxy getHttpProxy() { - if (proxyHost == null) { - return null; - } else if (proxyPort == -1) { - throw new IllegalStateException("proxy_port is not specified for server " - + getUrlString() + " with proxyHost"); - } - if (protocol != null - && !protocol.equalsIgnoreCase("http") - && !protocol.equalsIgnoreCase("https")) { - throw new IllegalStateException( - "Server is not an HTTP(S) server, so it cannot use a HTTP proxyHost."); - } - return new Proxy(Type.HTTP, new InetSocketAddress(proxyHost, proxyPort)); - } - - /** Server host name or IP address. */ - public String getHost() { - return host; - } - - /** Set server host name or IP address. */ - public void setHost(String host) { - this.host = host; - } - - /** Server port. Defaults to -1. */ - public int getPort() { - return port; - } - - /** Set server port. */ - public void setPort(int port) { - this.port = port; - } - - /** Server protocol. */ - public String getProtocol() { - return protocol; - } - - /** Set server protocol. */ - public void setProtocol(String protocol) { - this.protocol = protocol; - } - - /** Proxy host name. Null if not set. */ - public String getProxyHost() { - return proxyHost; - } - - /** Set proxyHost host name. */ - public void setProxyHost(String proxyHost) { - this.proxyHost = proxyHost; - } - - /** Proxy port. 
Defaults to -1. */ - public int getProxyPort() { - return proxyPort; - } - - /** Set proxyHost port. */ - public void setProxyPort(int proxyPort) { - this.proxyPort = proxyPort; - } - - public String getPath() { - return path; - } - - /** - * Set the absolute path. If the path is empty, it will be set to the root. The path - * will be ended with a single slash. The path will be prepended with a single slash if needed. - * @param path path string - * @throws IllegalArgumentException if the path contains a question mark. - */ - public final void setPath(String path) { - this.path = cleanPath(path); - } - - @SuppressWarnings("PMD.UseStringBufferForStringAppends") - private static String cleanPath(String path) { - if (path == null) { - return null; - } - if (path.contains("?") || path.contains("#")) { - throw new IllegalArgumentException("Cannot set server path with query string"); - } - String newPath = path.trim(); - if (newPath.isEmpty()) { - return "/"; - } - if (newPath.charAt(0) != '/') { - newPath = '/' + newPath; - } - if (newPath.charAt(newPath.length() - 1) != '/') { - newPath += '/'; - } - return newPath; - } - - @Override - public String toString() { - return getUrlString(); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - ServerConfig otherConfig = (ServerConfig) other; - return Objects.equals(host, otherConfig.host) - && port == otherConfig.port - && unsafe == otherConfig.unsafe - && Objects.equals(protocol, otherConfig.protocol) - && Objects.equals(proxyHost, otherConfig.proxyHost) - && proxyPort == otherConfig.proxyPort; - } - - @Override - public int hashCode() { - return Objects.hash(protocol, host, port); - } - - public boolean isUnsafe() { - return unsafe; - } - - public void setUnsafe(boolean unsafe) { - this.unsafe = unsafe; - } -} diff --git a/radar-commons/src/main/java/org/radarbase/config/ServerConfig.kt 
b/radar-commons/src/main/java/org/radarbase/config/ServerConfig.kt new file mode 100644 index 00000000..43421590 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/config/ServerConfig.kt @@ -0,0 +1,192 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.config + +import java.net.InetSocketAddress +import java.net.MalformedURLException +import java.net.Proxy +import java.net.URL +import java.util.* + +/** + * POJO representing a ServerConfig configuration. + */ +class ServerConfig { + /** Server host name or IP address. */ + /** Set server host name or IP address. */ + var host: String? = null + /** Server port. Defaults to -1. */ + /** Set server port. */ + var port = -1 + /** Server protocol. */ + /** Set server protocol. */ + var protocol: String? = null + + /** + * Set the absolute path. If the path is empty, it will be set to the root. The path + * will be ended with a single slash. The path will be prepended with a single slash if needed. + * @throws IllegalArgumentException if the path contains a question mark. + */ + var path: String = "" + set(value) { + field = value.toUrlPath() + } + /** Proxy host name. Null if not set. */ + /** Set proxyHost host name. */ + var proxyHost: String? = null + /** Proxy port. Defaults to -1. */ + /** Set proxyHost port. */ + var proxyPort = -1 + var isUnsafe = false + + /** Pojo initializer. 
*/ + constructor() { + // POJO initializer + } + + /** Parses the config from a URL. */ + constructor(url: URL) { + host = url.host + port = url.port + protocol = url.protocol + path = url.file + } + + /** Parses the config from a URL string. */ + constructor(urlString: String) { + val matcher = URL_PATTERN.matchEntire(urlString) + ?: throw MalformedURLException("Cannot create URL from string $this") + val groups = matcher.groups + protocol = groups[1]?.value ?: "https" + host = requireNotNull(groups[2]?.value) { "Cannot create URL without host name from $this" } + port = groups[3]?.value?.toIntOrNull() ?: -1 + path = groups[4]?.value.toUrlPath() + } + + /** Get the path of the server as a string. This does not include proxyHost information. */ + val urlString: String + get() = buildString(host!!.length + path.length + 20) { + appendUrlString(this) + } + + /** Get the path of the server as a string. This does not include proxyHost information. */ + private fun appendUrlString(builder: StringBuilder) = builder.run { + if (protocol != null) { + append(protocol) + append("://") + } + append(host) + if (port != -1) { + append(':') + append(port) + } + append(path) + } + + /** + * Get the server as a URL. + * + * @return URL to the server. + * @throws IllegalStateException if the URL is invalid + */ + val url: URL + get() { + checkNotNull(protocol) { "Cannot create URL without protocol" } + checkNotNull(host) { "Cannot create URL without host" } + return try { + URL(protocol, host, port, path) + } catch (ex: MalformedURLException) { + throw IllegalStateException("Already parsed a URL but it turned out invalid", ex) + } + } + + /** + * Get the HTTP proxyHost associated to given server. + * @return http proxyHost if specified, or null if none is specified. + * @throws IllegalStateException if proxyHost is set but proxyPort is not or if the server + * protocol is not HTTP(s) + */ + val httpProxy: Proxy? 
+ get() { + proxyHost ?: return null + check(proxyPort != -1) { "proxy_port is not specified for server $urlString with proxyHost" } + + check( + protocol == null || + protocol.equals("http", ignoreCase = true) || + protocol.equals("https", ignoreCase = true), + ) { "Server is not an HTTP(S) server, so it cannot use a HTTP proxyHost." } + return Proxy(Proxy.Type.HTTP, InetSocketAddress(proxyHost, proxyPort)) + } + + override fun toString(): String = urlString + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + if (other == null || javaClass != other.javaClass) { + return false + } + other as ServerConfig + return host == other.host && + port == other.port && + isUnsafe == other.isUnsafe && + protocol == other.protocol && + proxyHost == other.proxyHost && + proxyPort == other.proxyPort + } + + override fun hashCode(): Int { + return Objects.hash(host, path) + } + + companion object { + private val URL_PATTERN = "(?:(\\w+)://)?([^:/]+)(?::(\\d+))?(/.*)?".toRegex() + private val BAD_SLASHES_REGEX = "/(\\.*/)+".toRegex() + + /** Get the paths of a list of servers, concatenated with commas. 
*/ + @JvmStatic + fun getPaths(configList: List): String = buildString(configList.size * 40) { + var first = true + for (server in configList) { + if (first) { + first = false + } else { + append(',') + } + server.appendUrlString(this) + } + } + + private fun String?.toUrlPath(): String { + this ?: return "" + require(!contains("?")) { "Cannot set server path with query string" } + require(!contains("#")) { "Cannot set server path with location string" } + var newPath = trim { it <= ' ' } + if (newPath.isEmpty()) { + return "/" + } + if (newPath.first() != '/') { + newPath = "/$newPath" + } + if (newPath.last() != '/') { + newPath += '/' + } + return newPath.replace(BAD_SLASHES_REGEX, "/") + } + } +} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroDatumDecoder.java b/radar-commons/src/main/java/org/radarbase/data/AvroDatumDecoder.java deleted file mode 100644 index 21417ec1..00000000 --- a/radar-commons/src/main/java/org/radarbase/data/AvroDatumDecoder.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.data; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData; -import org.apache.avro.io.BinaryDecoder; -import org.apache.avro.io.DatumReader; -import org.apache.avro.io.Decoder; -import org.apache.avro.io.DecoderFactory; - -/** An AvroDecoder to decode known SpecificRecord classes. */ -public class AvroDatumDecoder implements AvroDecoder { - private final DecoderFactory decoderFactory; - private final boolean binary; - private final GenericData genericData; - - /** - * Decoder for Avro data. - * @param genericData instance of GenericData or SpecificData that should implement - * {@link GenericData#createDatumReader(Schema)}. - * @param binary true if the read data has Avro binary encoding, false if it has Avro JSON - * encoding. - */ - public AvroDatumDecoder(GenericData genericData, boolean binary) { - this.genericData = genericData; - this.decoderFactory = DecoderFactory.get(); - this.binary = binary; - } - - @Override - public AvroReader reader(Schema schema, Class clazz) { - @SuppressWarnings("unchecked") - DatumReader reader = genericData.createDatumReader(schema); - return new AvroRecordReader<>(schema, reader); - } - - private class AvroRecordReader implements AvroReader { - private final DatumReader reader; - private final Schema schema; - private Decoder decoder; - - private AvroRecordReader(Schema schema, DatumReader reader) { - this.reader = reader; - this.schema = schema; - this.decoder = null; - } - - @Override - public T decode(byte[] record) throws IOException { - return decode(record, 0); - } - - @Override - public T decode(byte[] record, int offset) throws IOException { - if (binary) { - decoder = decoderFactory.binaryDecoder(record, offset, record.length - offset, - (BinaryDecoder) decoder); - } else { - decoder = decoderFactory.jsonDecoder(schema, - new ByteArrayInputStream(record, offset, record.length - offset)); - } - 
return reader.read(null, decoder); - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroDatumDecoder.kt b/radar-commons/src/main/java/org/radarbase/data/AvroDatumDecoder.kt new file mode 100644 index 00000000..b6ffbbfa --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/data/AvroDatumDecoder.kt @@ -0,0 +1,77 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.data + +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.avro.io.BinaryDecoder +import org.apache.avro.io.DatumReader +import org.apache.avro.io.Decoder +import org.apache.avro.io.DecoderFactory +import org.radarbase.data.AvroDecoder.AvroReader +import java.io.ByteArrayInputStream +import java.io.IOException + +/** An AvroDecoder to decode known SpecificRecord classes. */ +/** + * Decoder for Avro data. + * @param genericData instance of GenericData or SpecificData that should implement + * [GenericData.createDatumReader]. + * @param binary true if the read data has Avro binary encoding, false if it has Avro JSON + * encoding. 
+ */ +class AvroDatumDecoder( + private val genericData: GenericData, + private val binary: Boolean, +) : AvroDecoder { + private val decoderFactory: DecoderFactory = DecoderFactory.get() + + @Suppress("UNCHECKED_CAST") + override fun reader(schema: Schema, clazz: Class): AvroReader { + val reader = genericData.createDatumReader(schema) as DatumReader + return AvroRecordReader(schema, reader) + } + + private inner class AvroRecordReader( + private val schema: Schema, + private val reader: DatumReader, + ) : AvroReader { + private var decoder: Decoder? = null + + @Throws(IOException::class) + override fun decode(`object`: ByteArray): T { + return decode(`object`, 0) + } + + @Throws(IOException::class) + override fun decode(`object`: ByteArray, offset: Int): T { + decoder = if (binary) { + decoderFactory.binaryDecoder( + `object`, + offset, + `object`.size - offset, + decoder as? BinaryDecoder, + ) + } else { + decoderFactory.jsonDecoder( + schema, + ByteArrayInputStream(`object`, offset, `object`.size - offset), + ) + } + return reader.read(null, decoder) + } + } +} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroDatumEncoder.java b/radar-commons/src/main/java/org/radarbase/data/AvroDatumEncoder.java deleted file mode 100644 index fc364a05..00000000 --- a/radar-commons/src/main/java/org/radarbase/data/AvroDatumEncoder.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.data; - -import java.io.IOException; -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData; -import org.apache.avro.io.DatumWriter; -import org.apache.avro.io.EncoderFactory; - -/** An AvroEncoder to encode known SpecificRecord classes. */ -public class AvroDatumEncoder implements AvroEncoder { - private final EncoderFactory encoderFactory; - private final boolean binary; - private final GenericData genericData; - - /** - * Create a SpecificRecordEncoder. - * @param binary whether to use binary encoding or JSON. - */ - public AvroDatumEncoder(GenericData genericData, boolean binary) { - this.genericData = genericData; - this.encoderFactory = EncoderFactory.get(); - this.binary = binary; - } - - @Override - public AvroWriter writer(Schema schema, Class clazz) throws IOException { - @SuppressWarnings("unchecked") - DatumWriter writer = (DatumWriter)genericData.createDatumWriter(schema); - return new AvroRecordWriter<>(encoderFactory, schema, writer, binary); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroDatumEncoder.kt b/radar-commons/src/main/java/org/radarbase/data/AvroDatumEncoder.kt new file mode 100644 index 00000000..23d5704e --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/data/AvroDatumEncoder.kt @@ -0,0 +1,41 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.data + +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.avro.io.DatumWriter +import org.apache.avro.io.EncoderFactory +import org.radarbase.data.AvroEncoder.AvroWriter +import java.io.IOException + +/** + * An AvroEncoder to encode known SpecificRecord classes. + * @param binary whether to use binary encoding or JSON. + */ +class AvroDatumEncoder( + private val genericData: GenericData, + private val binary: Boolean, +) : AvroEncoder { + private val encoderFactory: EncoderFactory = EncoderFactory.get() + + @Suppress("UNCHECKED_CAST") + @Throws(IOException::class) + override fun writer(schema: Schema, clazz: Class, readerSchema: Schema): AvroWriter { + val writer = genericData.createDatumWriter(schema) as DatumWriter + return AvroRecordWriter(encoderFactory, schema, writer, binary) + } +} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroDecoder.java b/radar-commons/src/main/java/org/radarbase/data/AvroDecoder.kt similarity index 66% rename from radar-commons/src/main/java/org/radarbase/data/AvroDecoder.java rename to radar-commons/src/main/java/org/radarbase/data/AvroDecoder.kt index bf411098..0b48bc07 100644 --- a/radar-commons/src/main/java/org/radarbase/data/AvroDecoder.java +++ b/radar-commons/src/main/java/org/radarbase/data/AvroDecoder.kt @@ -13,29 +13,30 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +package org.radarbase.data -package org.radarbase.data; - -import java.io.IOException; -import org.apache.avro.Schema; - -/** Decode Avro values with a given encoder. */ -public interface AvroDecoder { - /** Create a new reader. This method is thread-safe, but the class it returns is not. 
*/ - AvroReader reader(Schema schema, Class clazz) throws IOException; +import org.apache.avro.Schema +import java.io.IOException +/** Decode Avro values with a given encoder. */ +interface AvroDecoder { + /** Create a new reader. This method is thread-safe, but the class it returns is not. */ + @Throws(IOException::class) + fun reader(schema: Schema, clazz: Class): AvroReader interface AvroReader { /** * Decode an object from bytes. This method is not thread-safe. Equivalent to calling * decode(object, 0). */ - T decode(byte[] object) throws IOException; + @Throws(IOException::class) + fun decode(`object`: ByteArray): T /** * Decode an object from bytes. This method is not thread-safe. * @param object bytes to decode from - * @param start start offset to decode from. + * @param offset start offset to decode from. */ - T decode(byte[] object, int start) throws IOException; + @Throws(IOException::class) + fun decode(`object`: ByteArray, offset: Int): T } } diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroEncoder.java b/radar-commons/src/main/java/org/radarbase/data/AvroEncoder.java deleted file mode 100644 index fcfc9fab..00000000 --- a/radar-commons/src/main/java/org/radarbase/data/AvroEncoder.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.data; - -import java.io.IOException; -import org.apache.avro.Schema; -import org.apache.avro.SchemaValidationException; -import org.radarbase.producer.rest.ParsedSchemaMetadata; - -/** Encode Avro values with a given encoder. The encoder may take into account the schema - * that the schema registry has listed for a given topic. */ -public interface AvroEncoder { - /** Create a new writer. This method is thread-safe, but the class it returns is not. */ - AvroWriter writer(Schema schema, Class clazz) throws IOException; - - interface AvroWriter { - /** - * Encode an object. This method is not thread-safe. Call - * {@link #setReaderSchema(ParsedSchemaMetadata)} before calling encode. - * @param object object to encode - * @return byte array with serialized object. - */ - byte[] encode(T object) throws IOException; - - /** - * Update the schema that the server is lists for the current topic. - * @param readerSchema schema listed by the schema registry. - * @throws SchemaValidationException if the server schema is incompatible with the writer - * schema. - */ - void setReaderSchema(ParsedSchemaMetadata readerSchema) throws SchemaValidationException; - - /** - * Get the schema that the server lists. - * @return schema as set by setReaderSchema or null if not called yet. - */ - ParsedSchemaMetadata getReaderSchema(); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroEncoder.kt b/radar-commons/src/main/java/org/radarbase/data/AvroEncoder.kt new file mode 100644 index 00000000..09ad87c0 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/data/AvroEncoder.kt @@ -0,0 +1,42 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.data + +import org.apache.avro.Schema +import java.io.IOException + +/** Encode Avro values with a given encoder. The encoder may take into account the schema + * that the schema registry has listed for a given topic. */ +interface AvroEncoder { + /** Create a new writer. This method is thread-safe, but the class it returns is not. */ + @Throws(IOException::class) + fun writer(schema: Schema, clazz: Class): AvroWriter = + writer(schema, clazz, schema) + + @Throws(IOException::class) + fun writer(schema: Schema, clazz: Class, readerSchema: Schema): AvroWriter + + interface AvroWriter { + /** + * Encode an object. This method is not thread-safe. Call + * [.setReaderSchema] before calling encode. + * @param object object to encode + * @return byte array with serialized object. + */ + @Throws(IOException::class) + fun encode(`object`: T): ByteArray + } +} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroRecordData.java b/radar-commons/src/main/java/org/radarbase/data/AvroRecordData.java deleted file mode 100644 index 9180e7a6..00000000 --- a/radar-commons/src/main/java/org/radarbase/data/AvroRecordData.java +++ /dev/null @@ -1,59 +0,0 @@ -package org.radarbase.data; - -import java.util.Iterator; -import java.util.List; -import java.util.Objects; -import org.radarbase.topic.AvroTopic; - -/** - * Avro record data belonging to a single key. 
- * @param key type - * @param value type - */ -public class AvroRecordData implements RecordData { - private final AvroTopic topic; - private final K key; - private final List records; - - /** - * Data from a topic. - * @param topic data topic - * @param key data key - * @param values non-empty data values. - * @throws IllegalArgumentException if the values are empty. - * @throws NullPointerException if any of the parameters are null. - */ - public AvroRecordData(AvroTopic topic, K key, List values) { - this.topic = Objects.requireNonNull(topic); - this.key = Objects.requireNonNull(key); - this.records = Objects.requireNonNull(values); - if (this.records.isEmpty()) { - throw new IllegalArgumentException("Records should not be empty."); - } - } - - @Override - public AvroTopic getTopic() { - return topic; - } - - @Override - public K getKey() { - return key; - } - - @Override - public Iterator iterator() { - return records.iterator(); - } - - @Override - public boolean isEmpty() { - return false; - } - - @Override - public int size() { - return records.size(); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroRecordData.kt b/radar-commons/src/main/java/org/radarbase/data/AvroRecordData.kt new file mode 100644 index 00000000..7289c988 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/data/AvroRecordData.kt @@ -0,0 +1,39 @@ +package org.radarbase.data + +import org.apache.avro.generic.IndexedRecord +import org.radarbase.topic.AvroTopic + +/** + * Avro record data belonging to a single key. + * @param key type + * @param value type + */ +/** + * Data from a topic. + * @param topic data topic + * @param key data key + * @param records non-empty data values. + * @throws IllegalArgumentException if the values are empty. + * @throws NullPointerException if any of the parameters are null. 
+ */ +class AvroRecordData( + override val topic: AvroTopic, + override val key: K, + private val records: List, +) : RecordData { + init { + require(records.isNotEmpty()) { "Records should not be empty." } + } + + override val sourceId: String? by lazy { + val sourceIdField = topic.keySchema.getField("sourceId") ?: return@lazy null + if (key !is IndexedRecord) return@lazy null + key.get(sourceIdField.pos()).toString() + } + + override fun iterator(): Iterator = records.iterator() + + override val isEmpty: Boolean = records.isEmpty() + + override fun size(): Int = records.size +} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroRecordWriter.java b/radar-commons/src/main/java/org/radarbase/data/AvroRecordWriter.java deleted file mode 100644 index bec2b3ba..00000000 --- a/radar-commons/src/main/java/org/radarbase/data/AvroRecordWriter.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.data; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import org.apache.avro.Schema; -import org.apache.avro.io.DatumWriter; -import org.apache.avro.io.Encoder; -import org.apache.avro.io.EncoderFactory; -import org.radarbase.producer.rest.ParsedSchemaMetadata; -import org.radarbase.data.AvroEncoder.AvroWriter; - -/** - * Encodes Avro records to bytes. 
- */ -public class AvroRecordWriter implements AvroWriter { - private final Encoder encoder; - private final ByteArrayOutputStream out; - private final DatumWriter writer; - private ParsedSchemaMetadata serverSchema; - - /** - * Writer for a given encoder, schema and writer. - * @param encoderFactory encoder factory to use. - * @param schema schema to write records with. - * @param writer data writer - * @param binary true if the data should be serialized with binary Avro encoding, false if it - * should be with JSON encoding. - * @throws IOException if an encoder cannot be constructed. - */ - public AvroRecordWriter(EncoderFactory encoderFactory, Schema schema, DatumWriter writer, - boolean binary) throws IOException { - this.writer = writer; - out = new ByteArrayOutputStream(); - if (binary) { - encoder = encoderFactory.binaryEncoder(out, null); - } else { - encoder = encoderFactory.jsonEncoder(schema, out); - } - } - - @Override - public byte[] encode(T record) throws IOException { - try { - writer.write(record, encoder); - encoder.flush(); - return out.toByteArray(); - } finally { - out.reset(); - } - } - - @Override - public void setReaderSchema(ParsedSchemaMetadata readerSchema) { - this.serverSchema = readerSchema; - } - - @Override - public ParsedSchemaMetadata getReaderSchema() { - return serverSchema; - } -} diff --git a/radar-commons/src/main/java/org/radarbase/data/AvroRecordWriter.kt b/radar-commons/src/main/java/org/radarbase/data/AvroRecordWriter.kt new file mode 100644 index 00000000..868983c6 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/data/AvroRecordWriter.kt @@ -0,0 +1,67 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.data + +import org.apache.avro.Schema +import org.apache.avro.io.DatumWriter +import org.apache.avro.io.Encoder +import org.apache.avro.io.EncoderFactory +import org.radarbase.data.AvroEncoder.AvroWriter +import java.io.ByteArrayOutputStream +import java.io.IOException + +/** + * Encodes Avro records to bytes. + * + * @param encoderFactory encoder factory to use. + * @param schema schema to write records with. + * @param writer data writer + * @param binary true if the data should be serialized with binary Avro encoding, false if it + * should be with JSON encoding. + * @throws IOException if an encoder cannot be constructed. 
+ */ +class AvroRecordWriter( + encoderFactory: EncoderFactory, + schema: Schema, + private val writer: DatumWriter, + binary: Boolean, +) : AvroWriter { + private val out: ByteArrayOutputStream = ByteArrayOutputStream() + private var encoder: Encoder = if (binary) { + encoderFactory.binaryEncoder(out, null) + } else { + encoderFactory.jsonEncoder(schema, out) + } + + init { + encoder = if (binary) { + encoderFactory.binaryEncoder(out, null) + } else { + encoderFactory.jsonEncoder(schema, out) + } + } + + @Throws(IOException::class) + override fun encode(`object`: T): ByteArray { + return try { + writer.write(`object`, encoder) + encoder.flush() + out.toByteArray() + } finally { + out.reset() + } + } +} diff --git a/radar-commons/src/main/java/org/radarbase/data/Record.java b/radar-commons/src/main/java/org/radarbase/data/Record.kt similarity index 68% rename from radar-commons/src/main/java/org/radarbase/data/Record.java rename to radar-commons/src/main/java/org/radarbase/data/Record.kt index 91f526a5..4c3d4c8e 100644 --- a/radar-commons/src/main/java/org/radarbase/data/Record.java +++ b/radar-commons/src/main/java/org/radarbase/data/Record.kt @@ -13,26 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -package org.radarbase.data; +package org.radarbase.data /** * A single data record. * * @param key type * @param value type - */ -public class Record { - public final K key; - public final V value; - - /** - * Single record, with current time as time added. - * @param key key - * @param value value - */ - public Record(K key, V value) { - this.key = key; - this.value = value; - } -} + */ +class Record +/** + * Single record, with current time as time added. 
+ * @param key key + * @param value value + */(val key: K, val value: V) diff --git a/radar-commons/src/main/java/org/radarbase/data/RecordData.java b/radar-commons/src/main/java/org/radarbase/data/RecordData.kt similarity index 59% rename from radar-commons/src/main/java/org/radarbase/data/RecordData.java rename to radar-commons/src/main/java/org/radarbase/data/RecordData.kt index 54b0cb5c..8b1d513b 100644 --- a/radar-commons/src/main/java/org/radarbase/data/RecordData.java +++ b/radar-commons/src/main/java/org/radarbase/data/RecordData.kt @@ -1,34 +1,37 @@ -package org.radarbase.data; +package org.radarbase.data -import org.radarbase.topic.AvroTopic; +import org.radarbase.topic.AvroTopic /** * Record data belonging to a single key. * @param key type * @param value type - */ -public interface RecordData extends Iterable { + */ +interface RecordData : Iterable { /** * Topic that the data belongs to. * @return Avro topic. */ - AvroTopic getTopic(); + val topic: AvroTopic /** * Key of each of the entries in the data set. * @return key */ - K getKey(); + val key: K + + /** Source ID linked to record data, if any. */ + val sourceId: String? /** * Whether the list of values is empty. * @return true if empty, false otherwise. */ - boolean isEmpty(); + val isEmpty: Boolean /** * The size of the value list. * @return size. 
*/ - int size(); + fun size(): Int } diff --git a/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.java b/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.java deleted file mode 100644 index bbd65b48..00000000 --- a/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.java +++ /dev/null @@ -1,88 +0,0 @@ -package org.radarbase.data; - -import java.io.IOException; -import org.apache.avro.Schema; -import org.apache.avro.SchemaValidationException; -import org.apache.avro.generic.GenericData; -import org.apache.avro.specific.SpecificData; -import org.apache.avro.specific.SpecificRecord; -import org.radarbase.producer.rest.AvroDataMapper; -import org.radarbase.producer.rest.AvroDataMapperFactory; -import org.radarbase.producer.rest.ParsedSchemaMetadata; - -/** - * Encodes data according to an Avro schema to the format and schema of the server. - */ -public class RemoteSchemaEncoder implements AvroEncoder { - private final boolean binary; - - /** - * Schema encoder. - * @param binary true if the server wants binary encoding, false if it wants JSON encoding. 
- */ - public RemoteSchemaEncoder(boolean binary) { - this.binary = binary; - } - - @Override - public AvroWriter writer(Schema schema, Class clazz) { - return new SchemaEncoderWriter<>(schema, clazz); - } - - private class SchemaEncoderWriter implements AvroWriter { - private final AvroEncoder recordEncoder; - private AvroEncoder.AvroWriter encoder; - private final boolean isGeneric; - private ParsedSchemaMetadata serverSchema; - private AvroDataMapper mapper; - private final Schema schema; - - SchemaEncoderWriter(Schema schema, Class clazz) { - this.schema = schema; - - GenericData genericData; - ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); - if (SpecificRecord.class.isAssignableFrom(clazz)) { - genericData = new SpecificData(classLoader); - isGeneric = false; - } else { - genericData = new GenericData(classLoader); - isGeneric = true; - } - recordEncoder = new AvroDatumEncoder(genericData, binary); - } - - @Override - public byte[] encode(T object) throws IOException { - return encoder.encode(mapper.convertAvro(object)); - } - - @Override - public final void setReaderSchema(ParsedSchemaMetadata readerSchema) - throws SchemaValidationException { - if (this.serverSchema != null - && readerSchema.getSchema().equals(this.serverSchema.getSchema())) { - return; - } - try { - if (!isGeneric) { - this.mapper = AvroDataMapperFactory.IDENTITY_MAPPER; - encoder = recordEncoder.writer(schema, Object.class); - } else { - this.mapper = AvroDataMapperFactory.get() - .createMapper(schema, readerSchema.getSchema(), - null); - encoder = recordEncoder.writer(readerSchema.getSchema(), Object.class); - } - this.serverSchema = readerSchema; - } catch (IOException ex) { - throw new IllegalStateException("Cannot construct Avro writer", ex); - } - } - - @Override - public ParsedSchemaMetadata getReaderSchema() { - return serverSchema; - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.kt 
b/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.kt new file mode 100644 index 00000000..c65793e0 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/data/RemoteSchemaEncoder.kt @@ -0,0 +1,62 @@ +package org.radarbase.data + +import org.apache.avro.Schema +import org.apache.avro.generic.GenericData +import org.apache.avro.specific.SpecificData +import org.apache.avro.specific.SpecificRecord +import org.radarbase.data.AvroEncoder.AvroWriter +import org.radarbase.producer.avro.AvroDataMapper +import org.radarbase.producer.avro.AvroDataMapperFactory +import org.radarbase.producer.avro.AvroDataMapperFactory.validationException +import java.io.IOException + +/** + * Encodes data according to an Avro schema to the format and schema of the server. + * + * @param binary true if the server wants binary encoding, false if it wants JSON encoding. + */ +class RemoteSchemaEncoder( + private val binary: Boolean, +) : AvroEncoder { + override fun writer(schema: Schema, clazz: Class, readerSchema: Schema): AvroWriter { + return SchemaEncoderWriter(binary, schema, clazz, readerSchema) + } + + class SchemaEncoderWriter( + binary: Boolean, + schema: Schema, + clazz: Class, + readerSchema: Schema, + ) : AvroWriter { + private val recordEncoder: AvroEncoder + private val encoder: AvroWriter + private val isGeneric: Boolean + private val mapper: AvroDataMapper + + init { + if (schema.type !== Schema.Type.RECORD) throw validationException(schema, readerSchema, "Can only map records.") + val genericData: GenericData + val classLoader = Thread.currentThread().contextClassLoader + val useReaderSchema: Schema + if (SpecificRecord::class.java.isAssignableFrom(clazz)) { + genericData = SpecificData(classLoader) + useReaderSchema = schema + isGeneric = false + } else { + genericData = GenericData(classLoader) + useReaderSchema = readerSchema + isGeneric = true + } + recordEncoder = AvroDatumEncoder(genericData, binary) + mapper = 
AvroDataMapperFactory.createMapper(schema, useReaderSchema, null) + encoder = recordEncoder.writer(useReaderSchema, Any::class.java) + } + + @Throws(IOException::class) + override fun encode(`object`: T): ByteArray = encoder.encode( + requireNotNull(mapper.convertAvro(`object`)) { + "Cannot map $`object` to Avro" + }, + ) + } +} diff --git a/radar-commons/src/main/java/org/radarbase/data/StringEncoder.java b/radar-commons/src/main/java/org/radarbase/data/StringEncoder.java deleted file mode 100644 index 0169be8d..00000000 --- a/radar-commons/src/main/java/org/radarbase/data/StringEncoder.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.data; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectWriter; -import java.io.IOException; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Type; -import org.apache.avro.SchemaValidationException; -import org.radarbase.producer.rest.ParsedSchemaMetadata; - -/** Encodes a String as Avro. 
*/ -public class StringEncoder implements AvroEncoder, AvroEncoder.AvroWriter { - private static final ObjectWriter JSON_ENCODER = new ObjectMapper().writer(); - private ParsedSchemaMetadata readerSchema; - - @SuppressWarnings("unchecked") - @Override - public AvroWriter writer(Schema schema, Class clazz) { - if (schema.getType() != Schema.Type.STRING || !clazz.equals(String.class)) { - throw new IllegalArgumentException( - "Cannot encode String with a different type than STRING."); - } - - return (AvroWriter) this; - } - - @Override - public byte[] encode(String object) throws IOException { - return JSON_ENCODER.writeValueAsBytes(object); - } - - @Override - public void setReaderSchema(ParsedSchemaMetadata readerSchema) - throws SchemaValidationException { - if (readerSchema.getSchema().getType() != Type.STRING) { - throw new SchemaValidationException( - Schema.create(Type.STRING), - readerSchema.getSchema(), - new IllegalArgumentException("Cannot convert type to STRING")); - } - this.readerSchema = readerSchema; - - } - - @Override - public ParsedSchemaMetadata getReaderSchema() { - return readerSchema; - } -} diff --git a/radar-commons/src/main/java/org/radarbase/data/TimedInt.java b/radar-commons/src/main/java/org/radarbase/data/TimedInt.java deleted file mode 100644 index 7c065bba..00000000 --- a/radar-commons/src/main/java/org/radarbase/data/TimedInt.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.data; - -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; - -/** - * A single int, with modification times timed with system milliseconds time. - * This class can be used from multiple threads. - */ -public class TimedInt { - private final AtomicInteger value = new AtomicInteger(); - private final AtomicLong time = new AtomicLong(-1L); - - /** - * Value of the int. - */ - public int getValue() { - return value.get(); - } - - /** - * Time that the int got modified. - */ - public long getTime() { - return time.get(); - } - - /** - * Add value to the int. This updates the time variable to now. - * @param delta value to add. - */ - public void add(int delta) { - value.addAndGet(delta); - time.set(System.currentTimeMillis()); - } - - /** - * Set value to the int. This updates the time variable to now. - * @param value new value - */ - public void set(int value) { - this.value.set(value); - time.set(System.currentTimeMillis()); - } - - @Override - public synchronized boolean equals(Object other) { - if (other == null || !getClass().equals(other.getClass())) { - return false; - } - TimedInt timedOther = (TimedInt)other; - return value.equals(timedOther.value) && time.equals(timedOther.time); - } - - @Override - public int hashCode() { - return 31 * value.hashCode() + time.hashCode(); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/AuthenticationException.java b/radar-commons/src/main/java/org/radarbase/producer/AuthenticationException.kt similarity index 56% rename from radar-commons/src/main/java/org/radarbase/producer/AuthenticationException.java rename to radar-commons/src/main/java/org/radarbase/producer/AuthenticationException.kt index dcc81fab..7899de5d 100644 --- a/radar-commons/src/main/java/org/radarbase/producer/AuthenticationException.java +++ 
b/radar-commons/src/main/java/org/radarbase/producer/AuthenticationException.kt @@ -13,30 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -package org.radarbase.producer; - -import java.io.IOException; +package org.radarbase.producer /** * Failed to authenticate to server. */ -public class AuthenticationException extends IOException { - private static final long serialVersionUID = 1; - - public AuthenticationException() { - super(); - } - - public AuthenticationException(String message, Throwable cause) { - super(message, cause); - } - - public AuthenticationException(String message) { - super(message); - } - - public AuthenticationException(Throwable cause) { - super(cause); - } +class AuthenticationException : RuntimeException { + constructor(message: String?) : super(message) {} + constructor(message: String?, cause: Throwable?) : super(message, cause) {} } diff --git a/radar-commons/src/main/java/org/radarbase/producer/BatchedKafkaSender.java b/radar-commons/src/main/java/org/radarbase/producer/BatchedKafkaSender.java deleted file mode 100644 index 8bc7f22c..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/BatchedKafkaSender.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.producer; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.concurrent.TimeUnit; -import org.apache.avro.SchemaValidationException; -import org.radarbase.data.AvroRecordData; -import org.radarbase.data.RecordData; -import org.radarbase.topic.AvroTopic; - -/** - * A Kafka REST Proxy sender that batches up records. It will send data once the batch size is - * exceeded, or when at a send call the first record in the batch is older than given age. If send, - * flush or close are not called within this given age, the data will also not be sent. Calling - * {@link #close()} will not flush or close the KafkaTopicSender that were created. That must be - * done separately. - */ -public class BatchedKafkaSender implements KafkaSender { - private final KafkaSender wrappedSender; - private final long ageNanos; - private final int maxBatchSize; - - /** - * Kafka sender that sends data along. - * @param sender kafka sender to send data with. - * @param ageMillis threshold time after which a record should be sent. - * @param maxBatchSize threshold batch size over which records should be sent. - */ - public BatchedKafkaSender(KafkaSender sender, int ageMillis, int maxBatchSize) { - this.wrappedSender = sender; - this.ageNanos = TimeUnit.MILLISECONDS.toNanos(ageMillis); - this.maxBatchSize = maxBatchSize; - } - - @Override - public KafkaTopicSender sender(final AvroTopic topic) - throws IOException, SchemaValidationException { - return new BatchedKafkaTopicSender<>(topic); - } - - @Override - public boolean isConnected() throws AuthenticationException { - return wrappedSender.isConnected(); - } - - @Override - public boolean resetConnection() throws AuthenticationException { - return wrappedSender.resetConnection(); - } - - @Override - public synchronized void close() throws IOException { - wrappedSender.close(); - } - - /** Batched kafka topic sender. 
This does the actual data batching. */ - private class BatchedKafkaTopicSender implements KafkaTopicSender { - private long nanoAdded; - private K cachedKey; - private final List cache; - private final KafkaTopicSender topicSender; - private final AvroTopic topic; - - private BatchedKafkaTopicSender(AvroTopic topic) - throws IOException, SchemaValidationException { - cache = new ArrayList<>(); - this.topic = topic; - topicSender = wrappedSender.sender(topic); - } - - @Override - public void send(K key, V value) throws IOException, SchemaValidationException { - if (!isConnected()) { - throw new IOException("Cannot send records to unconnected producer."); - } - trySend(key, value); - } - - @Override - public void send(RecordData records) throws IOException, SchemaValidationException { - if (records.isEmpty()) { - return; - } - K key = records.getKey(); - for (V value : records) { - trySend(key, value); - } - } - - private void trySend(K key, V record) throws IOException, SchemaValidationException { - boolean keysMatch; - - if (cache.isEmpty()) { - cachedKey = key; - nanoAdded = System.nanoTime(); - keysMatch = true; - } else { - keysMatch = Objects.equals(key, cachedKey); - } - - if (keysMatch) { - cache.add(record); - if (exceedsBuffer(cache)) { - doSend(); - } - } else { - doSend(); - trySend(key, record); - } - } - - private void doSend() throws IOException, SchemaValidationException { - topicSender.send(new AvroRecordData<>(topic, cachedKey, cache)); - cache.clear(); - cachedKey = null; - } - - @Override - public void clear() { - cache.clear(); - topicSender.clear(); - } - - @Override - public void flush() throws IOException { - if (!cache.isEmpty()) { - try { - doSend(); - } catch (SchemaValidationException ex) { - throw new IOException("Schemas do not match", ex); - } - } - topicSender.flush(); - } - - @Override - @SuppressWarnings("PMD.UseTryWithResources") - public void close() throws IOException { - try { - flush(); - } finally { - wrappedSender.close(); - } 
- } - - private boolean exceedsBuffer(List records) { - return records.size() >= maxBatchSize - || System.nanoTime() - nanoAdded >= ageNanos; - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/KafkaSender.java b/radar-commons/src/main/java/org/radarbase/producer/KafkaSender.java deleted file mode 100644 index f5e3ac70..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/KafkaSender.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer; - -import java.io.Closeable; -import java.io.IOException; -import org.apache.avro.SchemaValidationException; -import org.radarbase.topic.AvroTopic; - -/** - * Thread-safe sender. Calling {@link #close()} must be done after all {@link KafkaTopicSender} - * senders created with {@link #sender(AvroTopic)} have been called. - */ -public interface KafkaSender extends Closeable { - /** Get a non thread-safe sender instance. */ - KafkaTopicSender sender(AvroTopic topic) - throws IOException, SchemaValidationException; - - /** - * If the sender is no longer connected, try to reconnect. - * @return whether the connection has been restored. - * @throws AuthenticationException if the headers caused an authentication error - * in the current request or in a previous one. 
- */ - boolean resetConnection() throws AuthenticationException; - - /** - * Get the current connection state to Kafka. If the connection state is unknown, this will - * trigger a connection check. - * @return true if connected, false if not connected. - * @throws AuthenticationException if the headers caused an authentication error - * in a previous request or during an additional connection - * check. - */ - boolean isConnected() throws AuthenticationException; -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/KafkaSender.kt b/radar-commons/src/main/java/org/radarbase/producer/KafkaSender.kt new file mode 100644 index 00000000..8b816c11 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/KafkaSender.kt @@ -0,0 +1,47 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer + +import io.ktor.client.* +import io.ktor.client.engine.* +import io.ktor.client.engine.cio.* +import io.ktor.client.plugins.* +import kotlinx.coroutines.flow.Flow +import org.apache.avro.SchemaValidationException +import org.radarbase.producer.rest.ConnectionState +import org.radarbase.topic.AvroTopic +import java.io.IOException + +/** + * Thread-safe sender. Calling [.close] must be done after all [KafkaTopicSender] + * senders created with [.sender] have been called. + */ +interface KafkaSender { + /** Get a non thread-safe sender instance. 
*/ + @Throws(IOException::class, SchemaValidationException::class) + fun sender(topic: AvroTopic): KafkaTopicSender + + /** + * If the sender is no longer connected, try to reconnect. + * @return whether the connection has been restored. + * @throws AuthenticationException if the headers caused an authentication error + * in the current request or in a previous one. + */ + @Throws(AuthenticationException::class) + suspend fun resetConnection(): Boolean + + val connectionState: Flow +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/KafkaTopicSender.java b/radar-commons/src/main/java/org/radarbase/producer/KafkaTopicSender.java deleted file mode 100644 index 21e26a32..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/KafkaTopicSender.java +++ /dev/null @@ -1,44 +0,0 @@ -package org.radarbase.producer; - -import java.io.Closeable; -import java.io.IOException; -import org.apache.avro.SchemaValidationException; -import org.radarbase.data.RecordData; - -/** - * Sender for a single topic. Should be created through a {@link KafkaSender}. - */ -public interface KafkaTopicSender extends Closeable { - /** - * Send a message to Kafka eventually. - * - * @param key key of a kafka record to send - * @param value value of a kafka record to send - * @throws AuthenticationException if the client failed to authenticate itself - * @throws IOException if the client could not send a message - */ - void send(K key, V value) throws IOException, SchemaValidationException; - - /** - * Send a message to Kafka eventually. Contained offsets must be strictly monotonically - * increasing for subsequent calls. - * - * @param records records to send. - * @throws AuthenticationException if the client failed to authenticate itself - * @throws IOException if the client could not send a message - */ - void send(RecordData records) throws IOException, SchemaValidationException; - - /** - * Clears any messages still in cache. 
- */ - void clear(); - - /** - * Flush all remaining messages. - * - * @throws AuthenticationException if the client failed to authenticate itself - * @throws IOException if the client could not send a message - */ - void flush() throws IOException; -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/KafkaTopicSender.kt b/radar-commons/src/main/java/org/radarbase/producer/KafkaTopicSender.kt new file mode 100644 index 00000000..e9e73c8d --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/KafkaTopicSender.kt @@ -0,0 +1,47 @@ +package org.radarbase.producer + +import org.apache.avro.SchemaValidationException +import org.radarbase.data.AvroRecordData +import org.radarbase.data.RecordData +import org.radarbase.topic.AvroTopic +import java.io.IOException + +/** + * Sender for a single topic. Should be created through a [KafkaSender]. + */ +interface KafkaTopicSender { + val topic: AvroTopic + + /** + * Send a message to Kafka eventually. + * + * @param key key of a kafka record to send + * @param value value of a kafka record to send + * @throws AuthenticationException if the client failed to authenticate itself + * @throws IOException if the client could not send a message + */ + @Throws(IOException::class, SchemaValidationException::class) + suspend fun send(key: K, value: V) = send(key, listOf(value)) + + /** + * Send a message to Kafka eventually. + * + * @param key key of a kafka record to send + * @param values values for kafka records to send + * @throws AuthenticationException if the client failed to authenticate itself + * @throws IOException if the client could not send a message + */ + @Throws(IOException::class, SchemaValidationException::class) + suspend fun send(key: K, values: List) = send(AvroRecordData(topic, key, values)) + + /** + * Send a message to Kafka eventually. Contained offsets must be strictly monotonically + * increasing for subsequent calls. + * + * @param records records to send. 
+ * @throws AuthenticationException if the client failed to authenticate itself + * @throws IOException if the client could not send a message + */ + @Throws(IOException::class, SchemaValidationException::class) + suspend fun send(records: RecordData) +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapper.java b/radar-commons/src/main/java/org/radarbase/producer/avro/AvroDataMapper.kt similarity index 52% rename from radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapper.java rename to radar-commons/src/main/java/org/radarbase/producer/avro/AvroDataMapper.kt index f2027223..e502eec7 100644 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapper.java +++ b/radar-commons/src/main/java/org/radarbase/producer/avro/AvroDataMapper.kt @@ -1,16 +1,14 @@ -package org.radarbase.producer.rest; - -import org.apache.avro.Schema; +package org.radarbase.producer.avro /** * Maps data from one avro record schema to another. Create it by calling - * {@link AvroDataMapperFactory#createMapper(Schema, Schema, Object)}. + * [AvroDataMapperFactory.createMapper]. */ -public interface AvroDataMapper { +fun interface AvroDataMapper { /** * Convert an Avro GenericData to another Avro GenericData representation. * @param object Avro object * @return Avro object */ - Object convertAvro(Object object); + fun convertAvro(`object`: Any?): Any? 
} diff --git a/radar-commons/src/main/java/org/radarbase/producer/avro/AvroDataMapperFactory.kt b/radar-commons/src/main/java/org/radarbase/producer/avro/AvroDataMapperFactory.kt new file mode 100644 index 00000000..3b839860 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/avro/AvroDataMapperFactory.kt @@ -0,0 +1,407 @@ +package org.radarbase.producer.avro + +import org.apache.avro.JsonProperties +import org.apache.avro.Schema +import org.apache.avro.SchemaValidationException +import org.apache.avro.generic.* +import org.radarbase.util.Base64Encoder +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import java.nio.ByteBuffer +import java.util.* + +object AvroDataMapperFactory { + /** + * Create a mapper for data in one Avro schema to that in another Avro schema. + * @param from originating Avro schema + * @param to resulting Avro schema + * @param defaultVal default value as defined in an Avro record field, + * may be null if there is no default value. + * @return Avro data mapper + * @throws SchemaValidationException if the given schemas are incompatible. 
+ */ + @Throws(SchemaValidationException::class) + fun createMapper(from: Schema, to: Schema, defaultVal: Any?): AvroDataMapper { + if (from == to) { + logger.debug("Using identity schema mapping from {} to {}", from, to) + return IDENTITY_MAPPER + } + logger.debug("Computing custom mapping from {} to {}", from, to) + return try { + if (to.type == Schema.Type.UNION || from.type == Schema.Type.UNION) { + return mapUnion(from, to, defaultVal) + } + if (to.type == Schema.Type.ENUM || from.type == Schema.Type.ENUM) { + return mapEnum(from, to, defaultVal) + } + when (to.type) { + Schema.Type.INT, Schema.Type.LONG, Schema.Type.DOUBLE, Schema.Type.FLOAT -> + return mapNumber(from, to, defaultVal) + else -> {} + } + when (from.type) { + Schema.Type.RECORD -> mapRecord(from, to) + Schema.Type.ARRAY -> mapArray(from, to) + Schema.Type.MAP -> mapMap(from, to) + Schema.Type.FIXED, Schema.Type.BYTES -> mapBytes(from, to, defaultVal) + Schema.Type.INT, Schema.Type.LONG, Schema.Type.DOUBLE, Schema.Type.FLOAT -> + mapNumber(from, to, defaultVal) + to.type -> IDENTITY_MAPPER + else -> throw validationException(to, from, "Schema types of from and to don't match") + } + } catch (ex: SchemaValidationException) { + defaultVal ?: throw ex + if (defaultVal === JsonProperties.NULL_VALUE) { + AvroDataMapper { null } + } else { + AvroDataMapper { defaultVal } + } + } + } + + /** Map one union to another, or a union to non-union, or non-union to union. */ + @Throws(SchemaValidationException::class) + private fun mapUnion(from: Schema, to: Schema, defaultVal: Any?): AvroDataMapper { + // Do not create a custom mapper for trivial changes. 
+ if ( + from.type == Schema.Type.UNION && + to.type == Schema.Type.UNION && + from.types.size == from.types.size + ) { + val matches = from.types.indices.all { i -> + val fromType = from.types[i].type + val toType = to.types[i].type + fromType == toType && fromType.isPrimitive() + } + if (matches) { + return IDENTITY_MAPPER + } + } + val resolvedFrom = if (from.type == Schema.Type.UNION) { + nonNullUnionSchema(from) + } else { + from + } + + return if (from.type == Schema.Type.UNION && to.type != Schema.Type.UNION) { + defaultVal ?: throw validationException(to, from, "Cannot map union to non-union without a default value") + val actualDefault = getDefaultValue(defaultVal, to) + val subMapper = createMapper(resolvedFrom, to, defaultVal) + AvroDataMapper { obj -> + if (obj == null) { + actualDefault + } else { + subMapper.convertAvro(obj) + } + } + } else { + val toNonNull = nonNullUnionSchema(to) + val unionMapper = createMapper(resolvedFrom, toNonNull, defaultVal) + AvroDataMapper { obj -> + obj ?: return@AvroDataMapper null + unionMapper.convertAvro(obj) + } + } + } + + /** Map an array to another. */ + @Throws(SchemaValidationException::class) + private fun mapArray(from: Schema, to: Schema): AvroDataMapper { + if (to.type != Schema.Type.ARRAY) { + throw validationException(to, from, "Cannot map array to non-array") + } + val subMapper = createMapper(from.elementType, to.elementType, null) + return AvroDataMapper { obj -> + obj.asAvroType>(from, to).map { subMapper.convertAvro(it) } + } + } + + /** Map a map to another. 
*/ + @Throws(SchemaValidationException::class) + private fun mapMap(from: Schema, to: Schema): AvroDataMapper { + if (to.type != Schema.Type.MAP) { + throw validationException(to, from, "Cannot map map to non-map") + } + val subMapper = createMapper(from.valueType, to.valueType, null) + return AvroDataMapper { obj -> + buildMap { + obj.asAvroType>(from, to).forEach { (k, v) -> + put(k.toString(), subMapper.convertAvro(v)) + } + } + } + } + + @Throws(SchemaValidationException::class) + private fun mapBytes(from: Schema, to: Schema, defaultVal: Any?): AvroDataMapper { + return if (from.type == Schema.Type.BYTES && to.type == Schema.Type.BYTES) { + IDENTITY_MAPPER + } else if (from.type == Schema.Type.FIXED && to.type == Schema.Type.FIXED && + from.fixedSize == to.fixedSize + ) { + IDENTITY_MAPPER + } else if (from.type == Schema.Type.FIXED && to.type == Schema.Type.BYTES) { + AvroDataMapper { `object` -> + ByteBuffer.wrap(`object`.asAvroType(from, to).bytes()) + } + } else if (from.type == Schema.Type.BYTES && to.type == Schema.Type.FIXED) { + defaultVal ?: throw validationException(to, from, "Cannot map bytes to fixed without default value") + + AvroDataMapper { `object`: Any? -> + val bytes = `object`.asAvroType(from, to).array() + val value = if (bytes.size == to.fixedSize) { + bytes + } else { + defaultVal as? 
ByteArray + } + GenericData.get().createFixed(null, value, to) + } + } else if (to.type == Schema.Type.STRING) { + val encoder = Base64Encoder + if (from.type == Schema.Type.FIXED) { + AvroDataMapper { `object` -> + encoder.encode(`object`.asAvroType(from, to).bytes()) + } + } else { + AvroDataMapper { `object` -> + encoder.encode(`object`.asAvroType(from, to).array()) + } + } + } else { + throw validationException(to, from, "Fixed type must be mapped to comparable byte size") + } + } + + @Throws(SchemaValidationException::class) + private fun mapRecord(from: Schema, to: Schema): AvroDataMapper { + if (to.type != Schema.Type.RECORD) { + throw validationException(to, from, "From and to schemas must be records.") + } + val fromFields = from.fields + val toFields = arrayOfNulls( + fromFields.size, + ) + val fieldMappers = arrayOfNulls( + fromFields.size, + ) + val filledPositions = BooleanArray(to.fields.size) + for (i in fromFields.indices) { + val fromField = fromFields[i] + val toField = to.getField(fromField.name()) ?: continue + filledPositions[toField.pos()] = true + toFields[i] = toField + fieldMappers[i] = createMapper( + fromField.schema(), + toField.schema(), + toField.defaultVal(), + ) + } + filledPositions.forEachIndexed { i, isFilled -> + if (!isFilled && to.fields[i].defaultVal() == null) { + throw validationException( + to, + from, + "Cannot map to record without default value for new field ${to.fields[i].name()}", + ) + } + } + return RecordMapper(to, toFields, fieldMappers) + } + + /** Maps one record to another. 
*/ + private class RecordMapper constructor( + private val toSchema: Schema, + private val toFields: Array, + private val fieldMappers: Array, + ) : AvroDataMapper { + override fun convertAvro(`object`: Any?): GenericRecord { + val builder = GenericRecordBuilder(toSchema) + val record = `object`.asAvroType(toSchema, toSchema) + for (i in toFields.indices) { + val field = toFields[i] ?: continue + val mapper = fieldMappers[i] ?: continue + builder[field] = mapper.convertAvro(record[i]) + } + return builder.build() + } + + override fun toString(): String { + return ( + "RecordMapper{" + + "fieldMappers=" + fieldMappers.contentToString() + + ", toFields=" + toFields.contentToString() + '}' + ) + } + } + + private class StringToNumberMapper( + private val defaultVal: Any?, + private val mapping: (String) -> Number, + ) : + AvroDataMapper { + override fun convertAvro(`object`: Any?): Any? { + `object` ?: return defaultVal + return try { + mapping(`object`.toString()) + } catch (ex: NumberFormatException) { + defaultVal + } + } + } + + private val logger: Logger = LoggerFactory.getLogger(AvroDataMapperFactory::class.java) + val IDENTITY_MAPPER: AvroDataMapper = object : AvroDataMapper { + override fun convertAvro(`object`: Any?): Any? = `object` + + override fun toString(): String = "Identity" + } + + private inline fun Any?.asAvroType(from: Schema, to: Schema): T { + if (this !is T) { + throw validationException( + to, + from, + "${to.type} type cannot be mapped from ${this?.javaClass?.name} Java type.", + ) + } + return this + } + + private val PRIMITIVE_TYPES = EnumSet.of( + Schema.Type.INT, + Schema.Type.LONG, + Schema.Type.BYTES, + Schema.Type.FLOAT, + Schema.Type.DOUBLE, + Schema.Type.NULL, + Schema.Type.BOOLEAN, + Schema.Type.STRING, + ) + + /** Map one enum to another or to String. 
*/ + @Throws(SchemaValidationException::class) + private fun mapEnum(from: Schema, to: Schema, defaultVal: Any?): AvroDataMapper { + return if (to.type == Schema.Type.ENUM) { + var containsAll = true + if (from.type == Schema.Type.ENUM) { + for (s in from.enumSymbols) { + if (!to.hasEnumSymbol(s)) { + containsAll = false + break + } + } + } else if (from.type == Schema.Type.STRING) { + containsAll = false + } else { + throw validationException(to, from, "Cannot map enum from non-string or enum type") + } + if (containsAll) { + AvroDataMapper { obj -> GenericData.EnumSymbol(to, obj.toString()) } + } else { + var defaultString = defaultVal as? String + if (defaultString == null) { + if (to.hasEnumSymbol("UNKNOWN")) { + defaultString = "UNKNOWN" + } else { + throw validationException( + to, + from, + "Cannot map enum symbols without default value", + ) + } + } + val symbol: GenericEnumSymbol<*> = GenericData.EnumSymbol(to, defaultString) + AvroDataMapper { obj: Any? -> + val value = obj.toString() + if (to.hasEnumSymbol(value)) { + GenericData.EnumSymbol(to, value) + } else { + symbol + } + } + } + } else if (from.type == Schema.Type.ENUM && to.type == Schema.Type.STRING) { + AvroDataMapper { it.toString() } + } else { + throw validationException(to, from, "Cannot map unknown type with enum.") + } + } + + /** Get the default value as a Generic type. */ + private fun getDefaultValue(defaultVal: Any?, schema: Schema): Any? { + return if (defaultVal == null) { + null + } else if (schema.type == Schema.Type.ENUM) { + GenericData.EnumSymbol(schema, defaultVal) + } else { + defaultVal + } + } + + /** Maps one number type to another or parses/converts to a string. 
*/ + @Throws(SchemaValidationException::class) + private fun mapNumber(from: Schema, to: Schema, defaultVal: Any?): AvroDataMapper { + if (from.type == to.type) { + return IDENTITY_MAPPER + } + return if (from.type == Schema.Type.STRING) { + defaultVal ?: throw validationException(to, from, "Cannot map string to number without default value.") + when (to.type) { + Schema.Type.INT -> StringToNumberMapper(defaultVal, Integer::valueOf) + Schema.Type.LONG -> StringToNumberMapper(defaultVal, String::toLong) + Schema.Type.DOUBLE -> StringToNumberMapper(defaultVal, String::toDouble) + Schema.Type.FLOAT -> StringToNumberMapper(defaultVal, String::toFloat) + else -> throw validationException( + to, + from, + "Cannot map numeric type with non-numeric type", + ) + } + } else { + when (to.type) { + Schema.Type.INT -> AvroDataMapper { it.asAvroType(from, to).toInt() } + Schema.Type.LONG -> AvroDataMapper { it.asAvroType(from, to).toLong() } + Schema.Type.DOUBLE -> AvroDataMapper { it.toString().toDouble() } + Schema.Type.FLOAT -> AvroDataMapper { it.asAvroType(from, to).toFloat() } + Schema.Type.STRING -> AvroDataMapper { it.toString() } + else -> throw validationException( + to, + from, + "Cannot map numeric type with non-numeric type", + ) + } + } + } + + /** Get the non-null union type of a nullable/optional union field. 
*/ + @Throws(SchemaValidationException::class) + private fun nonNullUnionSchema(schema: Schema): Schema { + val types = checkNotNull(schema.types) { "Union does not have subtypes" } + if (types.size != 2) { + throw validationException(schema, schema, "Types must denote optionals.") + } + return if (types[0].type == Schema.Type.NULL) { + if (types[1].type != Schema.Type.NULL) { + types[1] + } else { + throw validationException(schema, schema, "Types must denote optionals.") + } + } else if (types[1].type == Schema.Type.NULL) { + types[0] + } else { + throw validationException(schema, schema, "Types must denote optionals.") + } + } + + private fun Schema.Type.isPrimitive(): Boolean = this in PRIMITIVE_TYPES + + internal fun validationException( + from: Schema, + to: Schema, + message: String, + ): SchemaValidationException = SchemaValidationException( + to, + from, + IllegalArgumentException(message), + ) +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/io/BinaryEncoder.kt b/radar-commons/src/main/java/org/radarbase/producer/io/BinaryEncoder.kt new file mode 100644 index 00000000..86502180 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/io/BinaryEncoder.kt @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer.io + +import org.apache.avro.util.Utf8 +import java.io.IOException +import java.nio.ByteBuffer +import java.nio.charset.StandardCharsets + +/** + * An abstract [Encoder] for Avro's binary encoding. + * + * + * To construct and configure instances, use [EncoderFactory] + * + * @see EncoderFactory + * + * @see BufferedBinaryEncoder + * + * @see DirectBinaryEncoder + * + * @see BlockingBinaryEncoder + * + * @see Encoder + * + * @see Decoder + */ +abstract class BinaryEncoder : Encoder { + @Throws(IOException::class) + override suspend fun writeNull() { + } + + @Throws(IOException::class) + override suspend fun writeString(utf8: Utf8) { + this.writeBytes(utf8.bytes, 0, utf8.byteLength) + } + + @Throws(IOException::class) + override suspend fun writeString(str: String) { + if (str.isEmpty()) { + writeZero() + return + } + val bytes = str.toByteArray(StandardCharsets.UTF_8) + writeInt(bytes.size) + writeFixed(bytes, 0, bytes.size) + } + + @Throws(IOException::class) + override suspend fun writeBytes(bytes: ByteBuffer) { + val len = bytes.limit() - bytes.position() + if (0 == len) { + writeZero() + } else { + writeInt(len) + writeFixed(bytes) + } + } + + @Throws(IOException::class) + override suspend fun writeBytes(bytes: ByteArray, start: Int, len: Int) { + if (0 == len) { + writeZero() + return + } + writeInt(len) + this.writeFixed(bytes, start, len) + } + + @Throws(IOException::class) + override suspend fun writeEnum(e: Int) { + writeInt(e) + } + + @Throws(IOException::class) + override suspend fun writeArrayStart() { + } + + @Throws(IOException::class) + override suspend fun setItemCount(itemCount: Long) { + if (itemCount > 0) { + writeLong(itemCount) + } + } + + @Throws(IOException::class) + override suspend fun startItem() { + } + + @Throws(IOException::class) + override suspend fun writeArrayEnd() { + writeZero() + } + + 
@Throws(IOException::class) + override suspend fun writeMapStart() { + } + + @Throws(IOException::class) + override suspend fun writeMapEnd() { + writeZero() + } + + @Throws(IOException::class) + override suspend fun writeIndex(unionIndex: Int) { + writeInt(unionIndex) + } + + /** Write a zero byte to the underlying output. */ + @Throws(IOException::class) + protected abstract suspend fun writeZero() + + /** + * Returns the number of bytes currently buffered by this encoder. If this + * Encoder does not buffer, this will always return zero. + * + * + * Call [.flush] to empty the buffer to the underlying output. + */ + open val bytesBuffered: Int = 0 +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/io/DirectBinaryEncoder.kt b/radar-commons/src/main/java/org/radarbase/producer/io/DirectBinaryEncoder.kt new file mode 100644 index 00000000..c82df2b4 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/io/DirectBinaryEncoder.kt @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.radarbase.producer.io + +import io.ktor.utils.io.* +import org.apache.avro.io.BinaryData +import java.io.IOException +import java.util.* + +/** + * An [Encoder] for Avro's binary encoding that does not buffer output. + * + * + * This encoder does not buffer writes, and as a result is slower than + * [BufferedBinaryEncoder]. However, it is lighter-weight and useful when + * the buffering in BufferedBinaryEncoder is not desired and/or the Encoder is + * very short lived. + * + * + * To construct, use + * [EncoderFactory.directBinaryEncoder] + * + * + * DirectBinaryEncoder is not thread-safe + * + * @see BinaryEncoder + * + * @see EncoderFactory + * + * @see Encoder + * + * @see Decoder + */ +class DirectBinaryEncoder( + var out: ByteWriteChannel, +) : BinaryEncoder() { + // the buffer is used for writing floats, doubles, and large longs. + private val buf = ByteArray(12) + + @Throws(IOException::class) + override suspend fun flush() { + out.flush() + } + + override fun close() { + out.close() + } + + @Throws(IOException::class) + override suspend fun writeBoolean(b: Boolean) { + out.writeByte(if (b) 1 else 0) + } + + /* + * buffering is slower for ints that encode to just 1 or two bytes, and and + * faster for large ones. (Sun JRE 1.6u22, x64 -server) + */ + @Throws(IOException::class) + override suspend fun writeInt(n: Int) { + val `val` = n shl 1 xor (n shr 31) + if (`val` and 0x7F.inv() == 0) { + out.writeByte(`val`) + return + } else if (`val` and 0x3FFF.inv() == 0) { + out.writeByte(0x80 or `val`) + out.writeByte(`val` ushr 7) + return + } + val len = BinaryData.encodeInt(n, buf, 0) + out.writeFully(buf, 0, len) + } + + /* + * buffering is slower for writeLong when the number is small enough to fit in + * an int. 
(Sun JRE 1.6u22, x64 -server) + */ + @Throws(IOException::class) + override suspend fun writeLong(n: Long) { + val `val` = n shl 1 xor (n shr 63) // move sign to low-order bit + if (`val` and 0x7FFFFFFFL.inv() == 0L) { + var i = `val`.toInt() + while (i and 0x7F.inv() != 0) { + out.writeByte((0x80 or i and 0xFF).toByte().toInt()) + i = i ushr 7 + } + out.writeByte(i.toByte().toInt()) + return + } + val len = BinaryData.encodeLong(n, buf, 0) + out.writeFully(buf, 0, len) + } + + @Throws(IOException::class) + override suspend fun writeFloat(f: Float) { + val len = BinaryData.encodeFloat(f, buf, 0) + out.writeFully(buf, 0, len) + } + + @Throws(IOException::class) + override suspend fun writeDouble(d: Double) { + val len = BinaryData.encodeDouble(d, buf, 0) + out.writeFully(buf, 0, len) + } + + @Throws(IOException::class) + override suspend fun writeFixed(bytes: ByteArray, start: Int, len: Int) { + out.writeFully(bytes, start, len) + } + + @Throws(IOException::class) + override suspend fun writeZero() { + out.writeByte(0) + } +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/io/Encoder.kt b/radar-commons/src/main/java/org/radarbase/producer/io/Encoder.kt new file mode 100644 index 00000000..0411c5f1 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/io/Encoder.kt @@ -0,0 +1,342 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer.io + +import org.apache.avro.util.Utf8 +import java.io.Closeable +import java.io.IOException +import java.nio.ByteBuffer + +/** + * Low-level support for serializing Avro values. + * + * + * This class has two types of methods. One type of methods support the writing + * of leaf values (for example, [.writeLong] and [.writeString]). + * These methods have analogs in [Decoder]. + * + * + * The other type of methods support the writing of maps and arrays. These + * methods are [.writeArrayStart], [.startItem], and + * [.writeArrayEnd] (and similar methods for maps). Some implementations + * of [Encoder] handle the buffering required to break large maps and + * arrays into blocks, which is necessary for applications that want to do + * streaming. (See [.writeArrayStart] for details on these methods.) + * + * + * [EncoderFactory] contains Encoder construction and configuration + * facilities. + * + * @see EncoderFactory + * + * @see Decoder + */ +interface Encoder : Closeable { + /** + * "Writes" a null value. (Doesn't actually write anything, but advances the + * state of the parser if this class is stateful.) + * + * @throws AvroTypeException If this is a stateful writer and a null is not + * expected + */ + @Throws(IOException::class) + suspend fun writeNull() + + /** + * Write a boolean value. + * + * @throws AvroTypeException If this is a stateful writer and a boolean is not + * expected + */ + @Throws(IOException::class) + suspend fun writeBoolean(b: Boolean) + + /** + * Writes a 32-bit integer. 
+ * + * @throws AvroTypeException If this is a stateful writer and an integer is not + * expected + */ + @Throws(IOException::class) + suspend fun writeInt(n: Int) + + /** + * Write a 64-bit integer. + * + * @throws AvroTypeException If this is a stateful writer and a long is not + * expected + */ + @Throws(IOException::class) + suspend fun writeLong(n: Long) + + /** + * Write a float. + * + * @throws IOException + * @throws AvroTypeException If this is a stateful writer and a float is not + * expected + */ + @Throws(IOException::class) + suspend fun writeFloat(f: Float) + + /** + * Write a double. + * + * @throws AvroTypeException If this is a stateful writer and a double is not + * expected + */ + @Throws(IOException::class) + suspend fun writeDouble(d: Double) + + /** + * Write a Unicode character string. + * + * @throws AvroTypeException If this is a stateful writer and a char-string is + * not expected + */ + @Throws(IOException::class) + suspend fun writeString(utf8: Utf8) + + /** + * Write a Unicode character string. The default implementation converts the + * String to a [Utf8]. Some Encoder implementations + * may want to do something different as a performance optimization. + * + * @throws AvroTypeException If this is a stateful writer and a char-string is + * not expected + */ + @Throws(IOException::class) + suspend fun writeString(str: String) { + writeString(Utf8(str)) + } + + /** + * Write a Unicode character string. If the CharSequence is an + * [Utf8] it writes this directly, otherwise the + * CharSequence is converted to a String via toString() and written. + * + * @throws AvroTypeException If this is a stateful writer and a char-string is + * not expected + */ + @Throws(IOException::class) + suspend fun writeString(charSequence: CharSequence) { + if (charSequence is Utf8) writeString(charSequence) else writeString(charSequence.toString()) + } + + /** + * Write a byte string. 
+ * + * @throws AvroTypeException If this is a stateful writer and a byte-string is + * not expected + */ + @Throws(IOException::class) + suspend fun writeBytes(bytes: ByteBuffer) + + /** + * Write a byte string. + * + * @throws AvroTypeException If this is a stateful writer and a byte-string is + * not expected + */ + @Throws(IOException::class) + suspend fun writeBytes(bytes: ByteArray, start: Int, len: Int) + + /** + * Writes a byte string. Equivalent to + * writeBytes(bytes, 0, bytes.length) + * + * @throws IOException + * @throws AvroTypeException If this is a stateful writer and a byte-string is + * not expected + */ + @Throws(IOException::class) + suspend fun writeBytes(bytes: ByteArray) { + writeBytes(bytes, 0, bytes.size) + } + + /** + * Writes a fixed size binary object. + * + * @param bytes The contents to write + * @param start The position within bytes where the contents start. + * @param len The number of bytes to write. + * @throws AvroTypeException If this is a stateful writer and a byte-string is + * not expected + * @throws IOException + */ + @Throws(IOException::class) + suspend fun writeFixed(bytes: ByteArray, start: Int, len: Int) + + /** + * A shorthand for writeFixed(bytes, 0, bytes.length) + * + * @param bytes + */ + @Throws(IOException::class) + suspend fun writeFixed(bytes: ByteArray) { + writeFixed(bytes, 0, bytes.size) + } + + /** Writes a fixed from a ByteBuffer. */ + @Throws(IOException::class) + suspend fun writeFixed(bytes: ByteBuffer) { + val pos = bytes.position() + val len = bytes.limit() - pos + if (bytes.hasArray()) { + writeFixed(bytes.array(), bytes.arrayOffset() + pos, len) + } else { + val b = ByteArray(len) + bytes.duplicate()[b, 0, len] + writeFixed(b, 0, len) + } + } + + /** + * Writes an enumeration. + * + * @param e + * @throws AvroTypeException If this is a stateful writer and an enumeration is + * not expected or the e is out of range. 
+ * @throws IOException + */ + @Throws(IOException::class) + suspend fun writeEnum(e: Int) + + /** + * Call this method to start writing an array. + * + * When starting to serialize an array, call [.writeArrayStart]. Then, + * before writing any data for any item call [.setItemCount] followed by a + * sequence of [.startItem] and the item itself. The number of + * [.startItem] should match the number specified in + * [.setItemCount]. When actually writing the data of the item, you can + * call any [Encoder] method (e.g., [.writeLong]). When all items of + * the array have been written, call [.writeArrayEnd]. + * + * As an example, let's say you want to write an array of records, the record + * consisting of an Long field and a Boolean field. Your code would look + * something like this: + * + *
+     * out.writeArrayStart();
+     * out.setItemCount(list.size());
+     * for (Record r : list) {
+     * out.startItem();
+     * out.writeLong(r.longField);
+     * out.writeBoolean(r.boolField);
+     * }
+     * out.writeArrayEnd();
+     
* + * + * @throws AvroTypeException If this is a stateful writer and an array is not + * expected + */ + @Throws(IOException::class) + suspend fun writeArrayStart() + + /** + * Call this method before writing a batch of items in an array or a map. Then + * for each item, call [.startItem] followed by any of the other write + * methods of [Encoder]. The number of calls to [.startItem] must + * be equal to the count specified in [.setItemCount]. Once a batch is + * completed you can start another batch with [.setItemCount]. + * + * @param itemCount The number of [.startItem] calls to follow. + * @throws IOException + */ + @Throws(IOException::class) + suspend fun setItemCount(itemCount: Long) + + /** + * Start a new item of an array or map. See [.writeArrayStart] for usage + * information. + * + * @throws AvroTypeException If called outside of an array or map context + */ + @Throws(IOException::class) + suspend fun startItem() + + /** + * Call this method to finish writing an array. See [.writeArrayStart] for + * usage information. + * + * @throws AvroTypeException If items written does not match count provided to + * [.writeArrayStart] + * @throws AvroTypeException If not currently inside an array + */ + @Throws(IOException::class) + suspend fun writeArrayEnd() + + /** + * Call this to start a new map. See [.writeArrayStart] for details on + * usage. + * + * As an example of usage, let's say you want to write a map of records, the + * record consisting of an Long field and a Boolean field. Your code would look + * something like this: + * + *
+     * out.writeMapStart();
+     * out.setItemCount(list.size());
+     * for (Map.Entry, Record> entry : map.entrySet()) {
+     * out.startItem();
+     * out.writeString(entry.getKey());
+     * out.writeLong(entry.getValue().longField);
+     * out.writeBoolean(entry.getValue().boolField);
+     * }
+     * out.writeMapEnd();
+     
* + * + * @throws AvroTypeException If this is a stateful writer and a map is not + * expected + */ + @Throws(IOException::class) + suspend fun writeMapStart() + + /** + * Call this method to terminate the inner-most, currently-opened map. See + * [.writeArrayStart] for more details. + * + * @throws AvroTypeException If items written does not match count provided to + * [.writeMapStart] + * @throws AvroTypeException If not currently inside a map + */ + @Throws(IOException::class) + suspend fun writeMapEnd() + + /** + * Call this method to write the tag of a union. + * + * As an example of usage, let's say you want to write a union, whose second + * branch is a record consisting of an Long field and a Boolean field. Your code + * would look something like this: + * + *
+     * out.writeIndex(1);
+     * out.writeLong(record.longField);
+     * out.writeBoolean(record.boolField);
+     
* + * + * @throws AvroTypeException If this is a stateful writer and a map is not + * expected + */ + @Throws(IOException::class) + suspend fun writeIndex(unionIndex: Int) + + suspend fun flush() +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/io/FunctionalWriteChannelContent.kt b/radar-commons/src/main/java/org/radarbase/producer/io/FunctionalWriteChannelContent.kt new file mode 100644 index 00000000..2bf60d01 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/io/FunctionalWriteChannelContent.kt @@ -0,0 +1,10 @@ +package org.radarbase.producer.io + +import io.ktor.http.content.* +import io.ktor.utils.io.* + +class FunctionalWriteChannelContent( + private val writeAction: suspend (ByteWriteChannel) -> Unit, +) : OutgoingContent.WriteChannelContent() { + override suspend fun writeTo(channel: ByteWriteChannel) = writeAction(channel) +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/io/GzipContentEncoding.kt b/radar-commons/src/main/java/org/radarbase/producer/io/GzipContentEncoding.kt new file mode 100644 index 00000000..3df18ccb --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/io/GzipContentEncoding.kt @@ -0,0 +1,93 @@ +package org.radarbase.producer.io + +import io.ktor.client.* +import io.ktor.client.plugins.* +import io.ktor.client.request.* +import io.ktor.http.* +import io.ktor.http.content.* +import io.ktor.util.* +import io.ktor.util.cio.* +import io.ktor.utils.io.* +import kotlinx.coroutines.coroutineScope + +/** + * A plugin that allows you to enable specified compression algorithms (such as `gzip` and `deflate`) and configure their settings. + * This plugin serves two primary purposes: + * - Sets the `Accept-Encoding` header with the specified quality value. + * - Decodes content received from a server to obtain the original payload. + * + * You can learn more from [Content encoding](https://ktor.io/docs/content-encoding.html). 
+ */ +class GzipContentEncoding private constructor() { + private fun setRequestHeaders(headers: HeadersBuilder) { + if (headers.contains(HttpHeaders.ContentEncoding)) return + headers[HttpHeaders.ContentEncoding] = "gzip" + } + + private fun encode(headers: Headers, content: OutgoingContent): OutgoingContent { + val encodingHeader = (headers[HttpHeaders.ContentEncoding] ?: return content).split(",") + if (!encodingHeader.containsIgnoreCase("gzip")) return content + + return when (content) { + is OutgoingContent.ProtocolUpgrade, is OutgoingContent.NoContent -> content + is OutgoingContent.ReadChannelContent -> GzipReadChannel(content.readFrom()) + is OutgoingContent.ByteArrayContent -> GzipReadChannel(ByteReadChannel(content.bytes())) + is OutgoingContent.WriteChannelContent -> GzipWriteChannel(content) + } + } + + /** + * A configuration for the [GzipContentEncoding] plugin. + */ + @KtorDsl + class Config + + companion object : HttpClientPlugin { + override val key: AttributeKey = AttributeKey("GzipHttpEncoding") + + override fun prepare(block: Config.() -> Unit): GzipContentEncoding { + return GzipContentEncoding() + } + + override fun install(plugin: GzipContentEncoding, scope: HttpClient) { + scope.requestPipeline.intercept(HttpRequestPipeline.State) { + plugin.setRequestHeaders(context.headers) + } + + scope.requestPipeline.intercept(HttpRequestPipeline.Transform) { call -> + val method = this.context.method + val contentLength = context.contentLength() + + if (contentLength == 0L) return@intercept + if (contentLength == null && (method == HttpMethod.Head || method == HttpMethod.Options)) return@intercept + + if (call !is OutgoingContent) return@intercept + + proceedWith(plugin.encode(context.headers.build(), call)) + } + } + + private fun List.containsIgnoreCase(value: String): Boolean { + return any { el -> el.trim { it <= ' ' }.equals(value, ignoreCase = true) } + } + } + + private class GzipReadChannel( + private val original: ByteReadChannel, + ) : 
OutgoingContent.ReadChannelContent() { + override fun readFrom(): ByteReadChannel = + original.deflated(gzip = true) + } + + private class GzipWriteChannel( + private val content: WriteChannelContent, + ) : OutgoingContent.WriteChannelContent() { + override suspend fun writeTo(channel: ByteWriteChannel) { + coroutineScope { + channel.deflated(gzip = true, coroutineContext = coroutineContext).use { + content.writeTo(this) + } + } + } + } +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/io/HttpClientExtensions.kt b/radar-commons/src/main/java/org/radarbase/producer/io/HttpClientExtensions.kt new file mode 100644 index 00000000..99e1ece4 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/io/HttpClientExtensions.kt @@ -0,0 +1,36 @@ +package org.radarbase.producer.io + +import io.ktor.client.* +import io.ktor.client.engine.cio.* +import io.ktor.client.plugins.* +import java.security.cert.X509Certificate +import javax.net.ssl.X509TrustManager +import kotlin.time.Duration + +fun HttpClientConfig<*>.timeout(duration: Duration) { + install(HttpTimeout) { + val millis = duration.inWholeMilliseconds + connectTimeoutMillis = millis + socketTimeoutMillis = millis + requestTimeoutMillis = millis + } +} + +fun HttpClientConfig<*>.unsafeSsl() { + engine { + if (this is CIOEngineConfig) { + https { + trustManager = UNSAFE_TRUST_MANAGER + } + } + } +} + +/** Unsafe trust manager that trusts all certificates. 
*/ +private val UNSAFE_TRUST_MANAGER = object : X509TrustManager { + override fun checkClientTrusted(chain: Array, authType: String) = Unit + + override fun checkServerTrusted(chain: Array, authType: String) = Unit + + override fun getAcceptedIssuers(): Array = arrayOf() +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/io/UnsupportedMediaTypeException.kt b/radar-commons/src/main/java/org/radarbase/producer/io/UnsupportedMediaTypeException.kt new file mode 100644 index 00000000..34de28a1 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/io/UnsupportedMediaTypeException.kt @@ -0,0 +1,11 @@ +package org.radarbase.producer.io + +import io.ktor.http.* +import java.io.IOException + +class UnsupportedMediaTypeException( + contentType: ContentType?, + contentEncoding: String?, +) : IOException( + "Unsupported media type ${contentType ?: "unknown"} with ${contentEncoding ?: "no"} encoding", +) diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/AvroContentConverter.kt b/radar-commons/src/main/java/org/radarbase/producer/rest/AvroContentConverter.kt new file mode 100644 index 00000000..b8634e33 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/rest/AvroContentConverter.kt @@ -0,0 +1,63 @@ +package org.radarbase.producer.rest + +import io.ktor.http.* +import io.ktor.http.content.* +import io.ktor.serialization.* +import io.ktor.util.reflect.* +import io.ktor.utils.io.* +import io.ktor.utils.io.charsets.* +import kotlinx.coroutines.async +import kotlinx.coroutines.coroutineScope +import org.radarbase.data.RecordData +import org.radarbase.producer.schema.SchemaRetriever + +class AvroContentConverter( + private val schemaRetriever: SchemaRetriever, + private val binary: Boolean, +) : ContentConverter { + override suspend fun serializeNullable( + contentType: ContentType, + charset: Charset, + typeInfo: TypeInfo, + value: Any?, + ): OutgoingContent? 
{ + if (value !is RecordData<*, *>) return null + + return coroutineScope { + val keySchema = async { + schemaRetriever.metadata( + topic = value.topic.name, + ofValue = false, + schema = value.topic.keySchema, + ) + } + val valueSchema = async { + schemaRetriever.metadata( + topic = value.topic.name, + ofValue = true, + schema = value.topic.valueSchema, + ) + } + val maker = if (binary) { + BinaryRecordContent( + records = value, + keySchemaMetadata = keySchema.await(), + valueSchemaMetadata = valueSchema.await(), + ) + } else { + JsonRecordContent( + records = value, + keySchemaMetadata = keySchema.await(), + valueSchemaMetadata = valueSchema.await(), + ) + } + maker.createContent() + } + } + + override suspend fun deserialize( + charset: Charset, + typeInfo: TypeInfo, + content: ByteReadChannel, + ): Any? = null +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapperFactory.java b/radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapperFactory.java deleted file mode 100644 index 6ff189be..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/AvroDataMapperFactory.java +++ /dev/null @@ -1,497 +0,0 @@ -package org.radarbase.producer.rest; - -import static org.apache.avro.JsonProperties.NULL_VALUE; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Type; -import org.apache.avro.SchemaValidationException; -import org.apache.avro.generic.GenericData; -import org.apache.avro.generic.GenericData.Fixed; -import org.apache.avro.generic.GenericEnumSymbol; -import org.apache.avro.generic.GenericRecord; -import org.apache.avro.generic.GenericRecordBuilder; -import org.apache.avro.generic.IndexedRecord; -import org.radarbase.util.Base64; -import org.radarbase.util.Base64.Encoder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - 
-@SuppressWarnings({"PMD"}) -public final class AvroDataMapperFactory { - private static final Logger logger = LoggerFactory.getLogger(AvroDataMapperFactory.class); - - public static final AvroDataMapper IDENTITY_MAPPER = new AvroDataMapper() { - @Override - public Object convertAvro(Object obj) { - return obj; - } - - @Override - public String toString() { - return "Identity"; - } - }; - private static final AvroDataMapperFactory INSTANCE = new AvroDataMapperFactory(); - - public static AvroDataMapperFactory get() { - return INSTANCE; - } - - /** - * Create a mapper for data in one Avro schema to that in another Avro schema. - * @param from originating Avro schema - * @param to resulting Avro schema - * @param defaultVal default value as defined in an Avro record field, - * may be null if there is no default value. - * @return Avro data mapper - * @throws SchemaValidationException if the given schemas are incompatible. - */ - public AvroDataMapper createMapper(Schema from, Schema to, final Object defaultVal) - throws SchemaValidationException { - if (from.equals(to)) { - logger.debug("Using identity schema mapping from {} to {}", from, to); - return IDENTITY_MAPPER; - } - - logger.debug("Computing custom mapping from {} to {}", from, to); - try { - if (to.getType() == Schema.Type.UNION || from.getType() == Schema.Type.UNION) { - return mapUnion(from, to, defaultVal); - } - if (to.getType() == Schema.Type.ENUM || to.getType() == Schema.Type.ENUM) { - return mapEnum(from, to, defaultVal); - } - - switch (to.getType()) { - case INT: - case LONG: - case DOUBLE: - case FLOAT: - return mapNumber(from, to, defaultVal); - default: - break; - } - switch (from.getType()) { - case RECORD: - return mapRecord(from, to); - case ARRAY: - return mapArray(from, to); - case MAP: - return mapMap(from, to); - case FIXED: - case BYTES: - return mapBytes(from, to, defaultVal); - case INT: - case LONG: - case DOUBLE: - case FLOAT: - return mapNumber(from, to, defaultVal); - default: - 
if (from.getType() != to.getType()) { - throw new SchemaValidationException(to, from, new IllegalArgumentException( - "Schema types of from and to don't match")); - } - return IDENTITY_MAPPER; - } - } catch (SchemaValidationException ex) { - if (defaultVal != null) { - if (defaultVal == NULL_VALUE) { - return obj -> null; - } else { - return obj -> defaultVal; - } - } else { - throw ex; - } - } - } - - /** Map one enum to another or to String. */ - private static AvroDataMapper mapEnum(Schema from, final Schema to, Object defaultVal) - throws SchemaValidationException { - if (to.getType() == Schema.Type.ENUM) { - boolean containsAll = true; - if (from.getType() == Schema.Type.ENUM) { - for (String s : from.getEnumSymbols()) { - if (!to.hasEnumSymbol(s)) { - containsAll = false; - break; - } - } - } else if (from.getType() == Schema.Type.STRING) { - containsAll = false; - } else { - throw new SchemaValidationException(to, from, new IllegalArgumentException( - "Cannot map enum from non-string or enum type")); - } - if (containsAll) { - return obj -> new GenericData.EnumSymbol(to, obj.toString()); - } else { - String defaultString = (String) defaultVal; - if (defaultString == null && to.hasEnumSymbol("UNKNOWN")) { - defaultString = "UNKNOWN"; - } - if (defaultString == null) { - throw new SchemaValidationException(to, from, new IllegalArgumentException( - "Cannot map enum symbols without default value")); - } else { - GenericEnumSymbol symbol = new GenericData.EnumSymbol(to, defaultString); - return obj -> { - String value = obj.toString(); - if (to.hasEnumSymbol(value)) { - return new GenericData.EnumSymbol(to, value); - } else { - return symbol; - } - }; - } - } - } else if (from.getType() == Schema.Type.ENUM && to.getType() == Schema.Type.STRING) { - return Object::toString; - } else { - throw new SchemaValidationException(to, from, new IllegalArgumentException( - "Cannot map unknown type with enum.")); - } - } - - /** Get the default value as a Generic type. 
*/ - private static Object getDefaultValue(Object defaultVal, Schema schema) { - if (defaultVal == null) { - return null; - } else if (schema.getType() == Schema.Type.ENUM) { - return new GenericData.EnumSymbol(schema, defaultVal); - } else { - return defaultVal; - } - } - - /** Maps one number type to another or parses/converts to a string. */ - private static AvroDataMapper mapNumber(Schema from, Schema to, final Object defaultVal) - throws SchemaValidationException { - if (from.getType() == to.getType()) { - return IDENTITY_MAPPER; - } - - if (from.getType() == Schema.Type.STRING) { - if (defaultVal == null) { - throw new SchemaValidationException(to, from, new IllegalArgumentException( - "Cannot map string to number without default value.")); - } else { - switch (to.getType()) { - case INT: - return new StringToNumberMapper(defaultVal) { - @Override - public Number stringToNumber(String obj) { - return Integer.valueOf(obj); - } - }; - case LONG: - return new StringToNumberMapper(defaultVal) { - @Override - public Number stringToNumber(String obj) { - return Long.valueOf(obj); - } - }; - case DOUBLE: - return new StringToNumberMapper(defaultVal) { - @Override - public Number stringToNumber(String obj) { - return Double.valueOf(obj); - } - }; - case FLOAT: - return new StringToNumberMapper(defaultVal) { - @Override - public Number stringToNumber(String obj) { - return Float.valueOf(obj); - } - }; - default: - throw new SchemaValidationException(to, from, new IllegalArgumentException( - "Cannot map numeric type with non-numeric type")); - } - } - } else { - switch (to.getType()) { - case INT: - return obj -> ((Number) obj).intValue(); - case LONG: - return obj -> ((Number) obj).longValue(); - case DOUBLE: - return obj -> Double.valueOf(obj.toString()); - case FLOAT: - return obj -> ((Number) obj).floatValue(); - case STRING: - return Object::toString; - default: - throw new SchemaValidationException(to, from, new IllegalArgumentException( - "Cannot map numeric 
type with non-numeric type")); - } - } - } - - /** Get the non-null union type of a nullable/optional union field. */ - private static Schema nonNullUnionSchema(Schema schema) throws SchemaValidationException { - List types = schema.getTypes(); - - if (types.size() != 2) { - throw new SchemaValidationException(schema, schema, - new IllegalArgumentException("Types must denote optionals")); - } - - if (types.get(0).getType() == Schema.Type.NULL) { - if (types.get(1).getType() != Schema.Type.NULL) { - return types.get(1); - } else { - throw new SchemaValidationException(schema, schema, - new IllegalArgumentException("Types must denote optionals")); - } - } else if (types.get(1).getType() == Schema.Type.NULL) { - return types.get(0); - } else { - throw new SchemaValidationException(schema, schema, - new IllegalArgumentException("Types must denote optionals.")); - } - } - - /** Map one union to another, or a union to non-union, or non-union to union. */ - private AvroDataMapper mapUnion(Schema from, Schema to, Object defaultVal) - throws SchemaValidationException { - - // Do not create a custom mapper for trivial changes. - if (from.getType() == Schema.Type.UNION && to.getType() == Schema.Type.UNION - && from.getTypes().size() == from.getTypes().size()) { - boolean matches = true; - for (int i = 0; i < from.getTypes().size(); i++) { - Schema.Type fromType = from.getTypes().get(i).getType(); - Schema.Type toType = to.getTypes().get(i).getType(); - - if (fromType != toType || !isPrimitive(fromType)) { - matches = false; - break; - } - } - if (matches) { - return IDENTITY_MAPPER; - } - } - - Schema resolvedFrom = from.getType() == Schema.Type.UNION ? 
nonNullUnionSchema(from) : from; - - if (from.getType() == Schema.Type.UNION && to.getType() != Schema.Type.UNION) { - if (defaultVal != null) { - final Object actualDefault = getDefaultValue(defaultVal, to); - final AvroDataMapper subMapper = createMapper(resolvedFrom, to, defaultVal); - return obj -> { - if (obj == null) { - return actualDefault; - } else { - return subMapper.convertAvro(obj); - } - }; - } else { - throw new SchemaValidationException(to, from, new IllegalArgumentException( - "Cannot map union to non-union without a default value")); - } - } else { - Schema toNonNull = nonNullUnionSchema(to); - final AvroDataMapper unionMapper = createMapper(resolvedFrom, toNonNull, defaultVal); - return obj -> { - if (obj == null) { - return null; - } else { - return unionMapper.convertAvro(obj); - } - }; - } - } - - /** Map an array to another. */ - private AvroDataMapper mapArray(Schema from, Schema to) - throws SchemaValidationException { - if (to.getType() != Schema.Type.ARRAY) { - throw new SchemaValidationException(to, from, - new IllegalArgumentException("Cannot map array to non-array")); - } - final AvroDataMapper subMapper = createMapper(from.getElementType(), to.getElementType(), - null); - return obj -> { - List array = (List) obj; - List toArray = new ArrayList<>(array.size()); - for (Object val : array) { - toArray.add(subMapper.convertAvro(val)); - } - return toArray; - }; - } - - /** Map a map to another. 
*/ - private AvroDataMapper mapMap(Schema from, Schema to) throws SchemaValidationException { - if (to.getType() != Schema.Type.MAP) { - throw new SchemaValidationException(to, from, - new IllegalArgumentException("Cannot map array to non-array")); - } - final AvroDataMapper subMapper = createMapper(from.getValueType(), to.getValueType(), - null); - return obj -> { - @SuppressWarnings("unchecked") - Map map = (Map) obj; - Map toMap = new HashMap<>(map.size() * 4 / 3 + 1); - for (Map.Entry entry : map.entrySet()) { - toMap.put(entry.getKey().toString(), subMapper.convertAvro(entry.getValue())); - } - return toMap; - }; - } - - private AvroDataMapper mapBytes(Schema from, final Schema to, final Object defaultVal) - throws SchemaValidationException { - if (from.getType() == to.getType() - && (from.getType() == Type.BYTES - || (from.getType() == Type.FIXED && from.getFixedSize() == to.getFixedSize()))) { - return IDENTITY_MAPPER; - } else if (from.getType() == Type.FIXED && to.getType() == Schema.Type.BYTES) { - return object -> ByteBuffer.wrap(((Fixed)object).bytes()); - } else if (from.getType() == Type.BYTES && to.getType() == Type.FIXED) { - if (defaultVal == null) { - throw new SchemaValidationException(to, from, new IllegalArgumentException( - "Cannot map bytes to fixed without default value")); - } - return object -> { - byte[] bytes = ((ByteBuffer) object).array(); - if (bytes.length == to.getFixedSize()) { - return GenericData.get().createFixed(null, bytes, to); - } else { - return GenericData.get().createFixed(null, (byte[]) defaultVal, to); - } - }; - } else if (to.getType() == Type.STRING) { - final Encoder encoder = Base64.getEncoder(); - if (from.getType() == Type.FIXED) { - return object -> encoder.encode(((Fixed) object).bytes()); - } else { - return object -> encoder.encode(((ByteBuffer) object).array()); - } - } else { - throw new SchemaValidationException(to, from, - new IllegalArgumentException( - "Fixed type must be mapped to comparable byte 
size")); - } - } - - - private AvroDataMapper mapRecord(Schema from, Schema to) - throws SchemaValidationException { - if (to.getType() != Schema.Type.RECORD) { - throw new SchemaValidationException(to, from, - new IllegalArgumentException("From and to schemas must be records.")); - } - List fromFields = from.getFields(); - Schema.Field[] toFields = new Schema.Field[fromFields.size()]; - AvroDataMapper[] fieldMappers = new AvroDataMapper[fromFields.size()]; - - boolean[] filledPositions = new boolean[to.getFields().size()]; - - for (int i = 0; i < fromFields.size(); i++) { - Schema.Field fromField = fromFields.get(i); - Schema.Field toField = to.getField(fromField.name()); - if (toField == null) { - continue; - } - - filledPositions[toField.pos()] = true; - - Schema fromSchema = fromField.schema(); - Schema toSchema = toField.schema(); - - toFields[i] = toField; - fieldMappers[i] = createMapper(fromSchema, toSchema, toField.defaultVal()); - } - - for (int i = 0; i < filledPositions.length; i++) { - if (!filledPositions[i] && to.getFields().get(i).defaultVal() == null) { - throw new SchemaValidationException(to, from, - new IllegalArgumentException("Cannot map to record without default value" - + " for new field " + to.getFields().get(i).name())); - } - } - - return new RecordMapper(to, toFields, fieldMappers); - } - - /** Maps one record to another. 
*/ - private static class RecordMapper implements AvroDataMapper { - private final AvroDataMapper[] fieldMappers; - private final Schema.Field[] toFields; - private final Schema toSchema; - - RecordMapper(Schema toSchema, Schema.Field[] toFields, AvroDataMapper[] fieldMappers) { - this.toSchema = toSchema; - this.fieldMappers = fieldMappers; - this.toFields = toFields; - } - - - @Override - public GenericRecord convertAvro(Object obj) { - GenericRecordBuilder builder = new GenericRecordBuilder(toSchema); - IndexedRecord record = (IndexedRecord) obj; - for (int i = 0; i < toFields.length; i++) { - Schema.Field field = toFields[i]; - if (field == null) { - continue; - } - builder.set(field, fieldMappers[i].convertAvro(record.get(i))); - } - return builder.build(); - } - - @Override - public String toString() { - return "RecordMapper{" - + "fieldMappers=" + Arrays.toString(fieldMappers) - + ", toFields=" + Arrays.toString(toFields) + '}'; - } - } - - private abstract static class StringToNumberMapper implements AvroDataMapper { - private final Object defaultVal; - - StringToNumberMapper(Object defaultVal) { - this.defaultVal = defaultVal; - } - - @Override - public Object convertAvro(Object object) { - try { - return stringToNumber(object.toString()); - } catch (NumberFormatException ex) { - return defaultVal; - } - } - - abstract Number stringToNumber(String toString); - } - - private static boolean isPrimitive(Schema.Type type) { - switch (type) { - case INT: - case LONG: - case BYTES: - case FLOAT: - case DOUBLE: - case NULL: - case BOOLEAN: - case STRING: - return true; - default: - return false; - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/AvroRecordContent.kt b/radar-commons/src/main/java/org/radarbase/producer/rest/AvroRecordContent.kt new file mode 100644 index 00000000..5bcea266 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/rest/AvroRecordContent.kt @@ -0,0 +1,7 @@ +package 
org.radarbase.producer.rest + +import io.ktor.http.content.* + +interface AvroRecordContent { + fun createContent(): OutgoingContent +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordContent.kt b/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordContent.kt new file mode 100644 index 00000000..42785c50 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordContent.kt @@ -0,0 +1,67 @@ +package org.radarbase.producer.rest + +import io.ktor.http.content.* +import org.radarbase.data.RecordData +import org.radarbase.data.RemoteSchemaEncoder +import org.radarbase.producer.avro.AvroDataMapperFactory +import org.radarbase.producer.io.BinaryEncoder +import org.radarbase.producer.io.DirectBinaryEncoder +import org.radarbase.producer.io.FunctionalWriteChannelContent +import org.radarbase.producer.schema.ParsedSchemaMetadata +import org.slf4j.LoggerFactory + +class BinaryRecordContent( + private val records: RecordData<*, V>, + keySchemaMetadata: ParsedSchemaMetadata, + valueSchemaMetadata: ParsedSchemaMetadata, +) : AvroRecordContent { + private val valueEncoder = RemoteSchemaEncoder.SchemaEncoderWriter( + binary = true, + schema = records.topic.valueSchema, + clazz = records.topic.valueClass, + readerSchema = valueSchemaMetadata.schema, + ) + private val sourceId = records.sourceId + ?: throw AvroDataMapperFactory.validationException( + records.topic.keySchema, + keySchemaMetadata.schema, + "Cannot map record without source ID", + ) + + private val keySchemaVersion = requireNotNull(keySchemaMetadata.version) { + "missing key schema version" + } + private val valueSchemaVersion = requireNotNull(valueSchemaMetadata.version) { + "missing key schema version" + } + + override fun createContent(): OutgoingContent = FunctionalWriteChannelContent { channel -> + DirectBinaryEncoder(channel).use { + it.writeRecords() + } + } + + private suspend fun BinaryEncoder.writeRecords() { + startItem() + 
writeInt(keySchemaVersion) + writeInt(valueSchemaVersion) + + // do not send project ID; it is encoded in the serialization + writeIndex(0) + // do not send user ID; it is encoded in the serialization + writeIndex(0) + writeString(sourceId) + writeArrayStart() + setItemCount(records.size().toLong()) + for (record in records) { + startItem() + writeBytes(valueEncoder.encode(record)) + } + writeArrayEnd() + flush() + } + + companion object { + private val logger = LoggerFactory.getLogger(BinaryRecordContent::class.java) + } +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordRequest.java b/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordRequest.java deleted file mode 100644 index 41c56377..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/BinaryRecordRequest.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright 2018 The Hyve - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.producer.rest; - -import java.io.IOException; -import okio.Buffer; -import okio.BufferedSink; -import org.apache.avro.Schema; -import org.apache.avro.SchemaValidationException; -import org.apache.avro.generic.IndexedRecord; -import org.apache.avro.io.BinaryEncoder; -import org.apache.avro.io.EncoderFactory; -import org.radarbase.data.AvroEncoder.AvroWriter; -import org.radarbase.data.RecordData; -import org.radarbase.data.RemoteSchemaEncoder; -import org.radarbase.topic.AvroTopic; -import org.radarbase.util.Strings; - -/** - * Encodes a record request as binary data, in the form of a RecordSet. - * @param record key type - * @param record value type - */ -public class BinaryRecordRequest implements RecordRequest { - private int keyVersion; - private int valueVersion; - private RecordData records; - private BinaryEncoder binaryEncoder; - private final AvroWriter valueEncoder; - private final int sourceIdPos; - - /** - * Binary record request for given topic. - * @param topic topic to send data for. - * @throws SchemaValidationException if the key schema does not contain a - * {@code sourceId} field. - * @throws IllegalArgumentException if the topic cannot be used to make a AvroWriter. 
- */ - public BinaryRecordRequest(AvroTopic topic) throws SchemaValidationException { - if (topic.getKeySchema() == null || topic.getKeySchema().getType() != Schema.Type.RECORD) { - Schema keySchema = topic.getKeySchema(); - if (keySchema == null) { - keySchema = Schema.create(Schema.Type.NULL); - } - throw new SchemaValidationException(keySchema, keySchema, - new IllegalArgumentException("Cannot use non-record key schema")); - } - Schema.Field sourceIdField = topic.getKeySchema().getField("sourceId"); - if (sourceIdField == null) { - throw new SchemaValidationException(topic.getKeySchema(), topic.getKeySchema(), - new IllegalArgumentException("Cannot use binary encoder without a source ID.")); - } else { - sourceIdPos = sourceIdField.pos(); - } - valueEncoder = new RemoteSchemaEncoder(true) - .writer(topic.getValueSchema(), topic.getValueClass()); - } - - @Override - public void writeToSink(BufferedSink sink) throws IOException { - writeToSink(sink, Integer.MAX_VALUE); - } - - private void writeToSink(BufferedSink sink, int maxLength) throws IOException { - binaryEncoder = EncoderFactory.get().directBinaryEncoder( - sink.outputStream(), binaryEncoder); - binaryEncoder.startItem(); - binaryEncoder.writeInt(keyVersion); - binaryEncoder.writeInt(valueVersion); - - // do not send project ID; it is encoded in the serialization - binaryEncoder.writeIndex(0); - // do not send user ID; it is encoded in the serialization - binaryEncoder.writeIndex(0); - String sourceId = ((IndexedRecord) records.getKey()).get(sourceIdPos).toString(); - binaryEncoder.writeString(sourceId); - binaryEncoder.writeArrayStart(); - binaryEncoder.setItemCount(records.size()); - - int curLength = 18 + sourceId.length(); - - for (V record : records) { - if (curLength >= maxLength) { - return; - } - binaryEncoder.startItem(); - byte[] valueBytes = valueEncoder.encode(record); - binaryEncoder.writeBytes(valueBytes); - curLength += 4 + valueBytes.length; - } - binaryEncoder.writeArrayEnd(); - 
binaryEncoder.flush(); - } - - @Override - public void reset() { - records = null; - } - - @Override - public void prepare(ParsedSchemaMetadata keySchema, ParsedSchemaMetadata valueSchema, - RecordData records) throws SchemaValidationException { - keyVersion = keySchema.getVersion() == null ? 0 : keySchema.getVersion(); - valueVersion = valueSchema.getVersion() == null ? 0 : valueSchema.getVersion(); - - valueEncoder.setReaderSchema(valueSchema); - - this.records = records; - } - - @Override - public String content(int maxLength) throws IOException { - try (Buffer buffer = new Buffer()) { - writeToSink(buffer, maxLength / 2 - 2); - return "0x" + Strings.bytesToHex( - buffer.readByteArray(Math.min(buffer.size(), maxLength - 2))); - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/ConnectionState.java b/radar-commons/src/main/java/org/radarbase/producer/rest/ConnectionState.java deleted file mode 100644 index cb911f07..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/ConnectionState.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import java.util.concurrent.TimeUnit; - -/** - * Current connection status of a KafkaSender. After a timeout occurs this will turn to - * disconnected. 
When the connection is dropped, the associated KafkaSender should set this to - * disconnected, when it successfully connects, it should set it to connected. This class is - * thread-safe. The state transition diagram is CONNECTED to and from DISCONNECTED with - * {@link #didConnect()} and {@link #didDisconnect()}; CONNECTED to and from UNKNOWN with - * {@link #getState()} after a timeout occurs and {@link #didConnect()}; and UNKNOWN to DISCONNECTED - * with {@link #didDisconnect()}. - * - *

A connection state could be shared with multiple HTTP clients if they are talking to the same - * server. - */ -public final class ConnectionState { - - /** State symbols of the connection. */ - public enum State { - CONNECTED, DISCONNECTED, UNKNOWN, UNAUTHORIZED - } - - private long timeout; - private long lastConnection; - private State state; - - /** - * Connection state with given timeout. The state will start as connected. - * @param timeout timeout - * @param unit unit of the timeout - * @throws IllegalArgumentException if the timeout is not strictly positive. - */ - public ConnectionState(long timeout, TimeUnit unit) { - lastConnection = -1L; - state = State.UNKNOWN; - setTimeout(timeout, unit); - } - - /** Current state of the connection. */ - public synchronized State getState() { - if (state == State.CONNECTED && System.currentTimeMillis() - lastConnection >= timeout) { - state = State.UNKNOWN; - } - return state; - } - - /** For a sender to indicate that a connection attempt succeeded. */ - public synchronized void didConnect() { - state = State.CONNECTED; - lastConnection = System.currentTimeMillis(); - } - - /** For a sender to indicate that a connection attempt failed. */ - public synchronized void didDisconnect() { - state = State.DISCONNECTED; - } - - public synchronized void wasUnauthorized() { - state = State.UNAUTHORIZED; - } - - public synchronized void reset() { - state = State.UNKNOWN; - } - - /** - * Set the timeout after which the state will go from CONNECTED to UNKNOWN. 
- * @param timeout timeout - * @param unit unit of the timeout - * @throws IllegalArgumentException if the timeout is not strictly positive - */ - public synchronized void setTimeout(long timeout, TimeUnit unit) { - if (timeout <= 0) { - throw new IllegalArgumentException("Timeout must be strictly positive"); - } - this.timeout = TimeUnit.MILLISECONDS.convert(timeout, unit); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/ConnectionState.kt b/radar-commons/src/main/java/org/radarbase/producer/rest/ConnectionState.kt new file mode 100644 index 00000000..78440880 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/rest/ConnectionState.kt @@ -0,0 +1,84 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer.rest + +import kotlinx.coroutines.* +import kotlinx.coroutines.flow.* +import kotlin.coroutines.EmptyCoroutineContext +import kotlin.time.Duration + +/** + * Current connection status of a KafkaSender. After a timeout occurs this will turn to + * disconnected. When the connection is dropped, the associated KafkaSender should set this to + * disconnected, when it successfully connects, it should set it to connected. This class is + * thread-safe. 
The state transition diagram is CONNECTED to and from DISCONNECTED with + * [.didConnect] and [.didDisconnect]; CONNECTED to and from UNKNOWN with + * [.getState] after a timeout occurs and [.didConnect]; and UNKNOWN to DISCONNECTED + * with [.didDisconnect]. + * + * + * A connection state could be shared with multiple HTTP clients if they are talking to the same + * server. + * + * @param timeout timeout after which the connected state will be reset to unknown. + * @throws IllegalArgumentException if the timeout is not strictly positive. + */ +class ConnectionState( + private val timeout: Duration, + scope: CoroutineScope = CoroutineScope(EmptyCoroutineContext), +) { + /** State symbols of the connection. */ + enum class State { + CONNECTED, DISCONNECTED, UNKNOWN, UNAUTHORIZED + } + + val scope = scope + Job() + + private val mutableState = MutableStateFlow(State.UNKNOWN) + + @OptIn(ExperimentalCoroutinesApi::class) + val state: Flow = mutableState + .transformLatest { state -> + emit(state) + if (state == State.CONNECTED) { + delay(timeout) + emit(State.UNKNOWN) + } + } + .shareIn(this.scope + Dispatchers.Unconfined, SharingStarted.Eagerly, replay = 1) + + init { + mutableState.value = State.UNKNOWN + } + + /** For a sender to indicate that a connection attempt succeeded. */ + fun didConnect() { + mutableState.value = State.CONNECTED + } + + /** For a sender to indicate that a connection attempt failed. 
*/ + fun didDisconnect() { + mutableState.value = State.DISCONNECTED + } + + fun wasUnauthorized() { + mutableState.value = State.UNAUTHORIZED + } + + fun reset() { + mutableState.value = State.UNKNOWN + } +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/GzipRequestInterceptor.java b/radar-commons/src/main/java/org/radarbase/producer/rest/GzipRequestInterceptor.java deleted file mode 100644 index 6ce5f98b..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/GzipRequestInterceptor.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2018 The Hyve - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import java.io.IOException; -import okhttp3.Interceptor; -import okhttp3.MediaType; -import okhttp3.Request; -import okhttp3.RequestBody; -import okhttp3.Response; -import okio.BufferedSink; -import okio.GzipSink; -import okio.Okio; - -/** This interceptor compresses the HTTP request body. Many webservers can't handle this! 
*/ -public class GzipRequestInterceptor implements Interceptor { - @Override - public Response intercept(Interceptor.Chain chain) throws IOException { - Request originalRequest = chain.request(); - if (originalRequest.body() == null || originalRequest.header("Content-Encoding") != null) { - return chain.proceed(originalRequest); - } - - Request compressedRequest = originalRequest.newBuilder() - .header("Content-Encoding", "gzip") - .method(originalRequest.method(), gzip(originalRequest.body())) - .build(); - return chain.proceed(compressedRequest); - } - - private RequestBody gzip(final RequestBody body) { - return new RequestBody() { - @Override - public MediaType contentType() { - return body.contentType(); - } - - @Override - public long contentLength() { - return -1; // We don't know the compressed length in advance! - } - - @Override - public void writeTo(BufferedSink sink) throws IOException { - try (BufferedSink gzipSink = Okio.buffer(new GzipSink(sink))) { - body.writeTo(gzipSink); - } - } - }; - } - - @Override - public int hashCode() { - return 1; - } - - @Override - public boolean equals(Object obj) { - return this == obj || obj != null && getClass() == obj.getClass(); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordContent.kt b/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordContent.kt new file mode 100644 index 00000000..7fa481b9 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordContent.kt @@ -0,0 +1,66 @@ +package org.radarbase.producer.rest + +import io.ktor.http.content.* +import io.ktor.utils.io.* +import org.radarbase.data.RecordData +import org.radarbase.data.RemoteSchemaEncoder +import org.radarbase.producer.io.FunctionalWriteChannelContent +import org.radarbase.producer.schema.ParsedSchemaMetadata +import org.slf4j.LoggerFactory + +class JsonRecordContent( + private val records: RecordData, + private val keySchemaMetadata: ParsedSchemaMetadata, + private 
val valueSchemaMetadata: ParsedSchemaMetadata, +) : AvroRecordContent { + private val keyEncoder = RemoteSchemaEncoder.SchemaEncoderWriter( + binary = false, + schema = records.topic.keySchema, + clazz = records.topic.keyClass, + readerSchema = keySchemaMetadata.schema, + ) + private val valueEncoder = RemoteSchemaEncoder.SchemaEncoderWriter( + binary = false, + schema = records.topic.valueSchema, + clazz = records.topic.valueClass, + readerSchema = valueSchemaMetadata.schema, + ) + + override fun createContent(): OutgoingContent = + FunctionalWriteChannelContent { it.writeRecords() } + + private suspend fun ByteWriteChannel.writeRecords() { + writeByte('{'.code) + writeFully(KEY_SCHEMA_ID) + writeFully(keySchemaMetadata.id.toString().toByteArray()) + writeFully(VALUE_SCHEMA_ID) + writeFully(valueSchemaMetadata.id.toString().toByteArray()) + writeFully(RECORDS) + val key = keyEncoder.encode(records.key) + var first = true + for (record in records) { + if (first) { + first = false + } else { + writeByte(','.code) + } + writeFully(KEY) + writeFully(key) + writeFully(VALUE) + writeFully(valueEncoder.encode(record)) + writeByte('}'.code) + } + writeFully(END) + } + + companion object { + val KEY_SCHEMA_ID = "\"key_schema_id\":".toByteArray() + val VALUE_SCHEMA_ID = ",\"value_schema_id\":".toByteArray() + val RECORDS = ",\"records\":[".toByteArray() + val KEY = "{\"key\":".toByteArray() + val VALUE = ",\"value\":".toByteArray() + val END = "]}".toByteArray() + + private val logger = LoggerFactory.getLogger(JsonRecordContent::class.java) + } +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordRequest.java b/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordRequest.java deleted file mode 100644 index 2ae287ce..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/JsonRecordRequest.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache 
License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import okio.Buffer; -import okio.BufferedSink; -import org.apache.avro.SchemaValidationException; -import org.json.JSONObject; -import org.radarbase.data.AvroEncoder.AvroWriter; -import org.radarbase.data.RecordData; -import org.radarbase.data.RemoteSchemaEncoder; -import org.radarbase.topic.AvroTopic; -import org.radarbase.util.Strings; - -/** - * Request data to submit records to the Kafka REST proxy. - */ -public class JsonRecordRequest implements RecordRequest { - public static final byte[] KEY_SCHEMA_ID = Strings.utf8("\"key_schema_id\":"); - public static final byte[] VALUE_SCHEMA_ID = Strings.utf8(",\"value_schema_id\":"); - public static final byte[] RECORDS = Strings.utf8(",\"records\":["); - public static final byte[] KEY = Strings.utf8("{\"key\":"); - public static final byte[] VALUE = Strings.utf8(",\"value\":"); - public static final byte[] END = Strings.utf8("]}"); - - private final AvroWriter keyEncoder; - private final AvroWriter valueEncoder; - - private RecordData records; - - /** - * Generate a record request for given topic. - * @param topic topic to use. - * @throws IllegalStateException if key or value encoders could not be made. 
- */ - public JsonRecordRequest(AvroTopic topic) { - RemoteSchemaEncoder schemaEncoder = new RemoteSchemaEncoder(false); - - this.keyEncoder = schemaEncoder.writer(topic.getKeySchema(), topic.getKeyClass()); - this.valueEncoder = schemaEncoder.writer(topic.getValueSchema(), topic.getValueClass()); - } - - /** - * Writes the current topic to a stream. This implementation does not use any JSON writers to - * write the data, but writes it directly to a stream. {@link JSONObject#quote(String)} - * is used to get the correct formatting. This makes the method as lean as possible. - * @param sink buffered sink to write to. - * @throws IOException if a superimposing stream could not be created - */ - @Override - public void writeToSink(BufferedSink sink) throws IOException { - writeToSink(sink, Integer.MAX_VALUE); - } - - private void writeToSink(BufferedSink sink, int maxLength) throws IOException { - sink.writeByte('{'); - sink.write(KEY_SCHEMA_ID); - sink.write(Strings.utf8(String.valueOf(keyEncoder.getReaderSchema().getId()))); - sink.write(VALUE_SCHEMA_ID); - sink.write(Strings.utf8(String.valueOf(valueEncoder.getReaderSchema().getId()))); - - sink.write(RECORDS); - - byte[] key = keyEncoder.encode(records.getKey()); - - int curLength = KEY_SCHEMA_ID.length + VALUE_SCHEMA_ID.length + 7; - - boolean first = true; - for (V record : records) { - if (curLength >= maxLength) { - return; - } - if (first) { - first = false; - } else { - sink.writeByte(','); - } - sink.write(KEY); - sink.write(key); - - sink.write(VALUE); - byte[] valueBytes = valueEncoder.encode(record); - sink.write(valueBytes); - sink.writeByte('}'); - curLength += 2 + key.length + KEY.length + VALUE.length + valueBytes.length; - } - sink.write(END); - } - - @Override - public void reset() { - records = null; - } - - @Override - public void prepare(ParsedSchemaMetadata keySchema, ParsedSchemaMetadata valueSchema, - RecordData records) throws SchemaValidationException { - 
keyEncoder.setReaderSchema(keySchema); - valueEncoder.setReaderSchema(valueSchema); - this.records = records; - } - - @Override - public String content(int maxLength) throws IOException { - try (Buffer buffer = new Buffer()) { - writeToSink(buffer, maxLength); - return buffer.readString(Math.min(buffer.size(), maxLength), StandardCharsets.UTF_8); - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/ParsedSchemaMetadata.java b/radar-commons/src/main/java/org/radarbase/producer/rest/ParsedSchemaMetadata.java deleted file mode 100644 index d0009e72..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/ParsedSchemaMetadata.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import org.apache.avro.Schema; - -/** - * Parsed schema metadata from a Schema Registry. - */ -public class ParsedSchemaMetadata { - private final Integer version; - private Integer id; - private final Schema schema; - - /** - * Schema metadata. - * @param id schema ID, may be null. - * @param version schema version, may be null. - * @param schema parsed schema. 
- */ - public ParsedSchemaMetadata(Integer id, Integer version, Schema schema) { - this.id = id; - this.version = version; - this.schema = schema; - } - - public Integer getId() { - return id; - } - - public Schema getSchema() { - return schema; - } - - public Integer getVersion() { - return version; - } - - public void setId(Integer id) { - this.id = id; - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/RecordRequest.java b/radar-commons/src/main/java/org/radarbase/producer/rest/RecordRequest.java deleted file mode 100644 index 5c737f35..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/RecordRequest.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2018 The Hyve - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import java.io.IOException; -import okio.BufferedSink; -import org.apache.avro.SchemaValidationException; -import org.radarbase.data.RecordData; - -/** - * Record request contents. Before {@link #writeToSink(BufferedSink)} is called, first - * {@link #prepare(ParsedSchemaMetadata, ParsedSchemaMetadata, RecordData)} should be called. This - * class may be reused by calling prepare and reset alternatively. - * - * @param record key type. - * @param record content type. - */ -public interface RecordRequest { - /** Write the current records to a stream as a request. */ - void writeToSink(BufferedSink sink) throws IOException; - - /** Reset the contents. 
This may free up some memory because the recordrequest may be stored. */ - void reset(); - - /** Set the records to be sent. */ - void prepare(ParsedSchemaMetadata keySchema, ParsedSchemaMetadata valueSchema, - RecordData records) throws IOException, SchemaValidationException; - - /** - * Return the content of the record as a string. To avoid dual reading of data for RecordData - * that does not store the results, prepare and reset may be called around this method. - * @param maxLength maximum returned length - * @return the content. - * @throws IOException if the content cannot be written. - */ - String content(int maxLength) throws IOException; -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/RestClient.java b/radar-commons/src/main/java/org/radarbase/producer/rest/RestClient.java deleted file mode 100644 index df1cf8f6..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/RestClient.java +++ /dev/null @@ -1,310 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.producer.rest; - -import java.io.IOException; -import java.lang.ref.WeakReference; -import java.net.MalformedURLException; -import java.util.List; -import java.util.Objects; -import java.util.concurrent.TimeUnit; -import javax.net.ssl.SSLSocketFactory; -import javax.net.ssl.X509TrustManager; -import okhttp3.Callback; -import okhttp3.Headers; -import okhttp3.HttpUrl; -import okhttp3.Interceptor; -import okhttp3.OkHttpClient; -import okhttp3.Protocol; -import okhttp3.Request; -import okhttp3.Response; -import okhttp3.ResponseBody; -import org.radarbase.config.ServerConfig; -import org.radarbase.util.RestUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** REST client using OkHttp3. This class is not thread-safe. */ -public class RestClient { - private static final Logger logger = LoggerFactory.getLogger(RestClient.class); - - public static final long DEFAULT_TIMEOUT = 30; - private static WeakReference globalHttpClient = new WeakReference<>(null); - - private final ServerConfig server; - private final OkHttpClient httpClient; - private final Headers headers; - - private RestClient(Builder builder) { - this.server = Objects.requireNonNull(builder.serverConfig); - this.httpClient = builder.client.build(); - this.headers = builder.requestHeaders; - } - - /** OkHttp client. */ - public OkHttpClient getHttpClient() { - return httpClient; - } - - /** Configured connection timeout in seconds. */ - public long getTimeout() { - return httpClient.connectTimeoutMillis() / 1000; - } - - /** Configured server. */ - public ServerConfig getServer() { - return server; - } - - /** - * Make a blocking request. 
- * @param request request, possibly built with {@link #requestBuilder(String)} - * @return response to the request - * @throws IOException if the request fails - * @throws NullPointerException if the request is null - */ - public Response request(Request request) throws IOException { - Objects.requireNonNull(request); - return httpClient.newCall(request).execute(); - } - - /** - * Make an asynchronous request. - * @param request request, possibly built with {@link #requestBuilder(String)} - * @param callback callback to activate once the request is done. - */ - public void request(Request request, Callback callback) { - Objects.requireNonNull(request); - Objects.requireNonNull(callback); - httpClient.newCall(request).enqueue(callback); - } - - /** - * Make a request to given relative path. This does not set any request properties except the - * URL. - * @param relativePath relative path to request - * @return response to the request - * @throws IOException if the path is invalid or the request failed. - */ - public Response request(String relativePath) throws IOException { - return request(requestBuilder(relativePath).build()); - } - - /** - * Make a blocking request and return the body. - * @param request request to make. - * @return response body string. - * @throws RestException if no body was returned or an HTTP status code indicating error was - * returned. - * @throws IOException if the request cannot be completed or the response cannot be read. - * - */ - public String requestString(Request request) throws IOException { - try (Response response = request(request)) { - String bodyString = responseBody(response); - - if (!response.isSuccessful() || bodyString == null) { - throw new RestException(response.code(), bodyString); - } - - return bodyString; - } - } - - /** - * Create a OkHttp3 request builder with {@link Request.Builder#url(HttpUrl)} set. - * Call{@link Request.Builder#build()} to make the actual request with - * {@link #request(Request)}. 
- * - * @param relativePath relative path from the server serverConfig - * @return request builder. - * @throws MalformedURLException if the path not valid - */ - public Request.Builder requestBuilder(String relativePath) throws MalformedURLException { - return new Request.Builder().url(getRelativeUrl(relativePath)).headers(headers); - } - - /** - * Get a URL relative to the configured server. - * @param path relative path - * @return URL - * @throws MalformedURLException if the path is malformed - */ - public HttpUrl getRelativeUrl(String path) throws MalformedURLException { - String strippedPath = path; - while (!strippedPath.isEmpty() && strippedPath.charAt(0) == '/') { - strippedPath = strippedPath.substring(1); - } - HttpUrl.Builder builder = getServer().getHttpUrl().newBuilder(strippedPath); - if (builder == null) { - throw new MalformedURLException(); - } - return builder.build(); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - RestClient that = (RestClient) o; - - return this.server.equals(that.server) && this.httpClient.equals(that.httpClient); - } - - @Override - public int hashCode() { - return Objects.hash(server, httpClient); - } - - @Override - public String toString() { - return "RestClient{serverConfig=" + server + ", httpClient=" + httpClient + '}'; - } - - /** Get the response body of a response as a String. - * Will return null if the response body is null. - * @param response call response - * @return body contents as a String. - * @throws IOException if the body could not be read as a String. - */ - public static String responseBody(Response response) throws IOException { - try (ResponseBody body = response.body()) { - if (body == null) { - return null; - } - return body.string(); - } - } - - /** Create a new builder with the settings of the current client. 
*/ - public Builder newBuilder() { - return new Builder(httpClient) - .server(server); - } - - /** Builder. */ - public static class Builder { - private ServerConfig serverConfig; - private final OkHttpClient.Builder client; - private Headers requestHeaders = Headers.of(); - - public Builder(OkHttpClient client) { - this(client.newBuilder()); - } - - public Builder(OkHttpClient.Builder client) { - this.client = client; - } - - /** Server configuration. */ - public Builder server(ServerConfig config) { - this.serverConfig = Objects.requireNonNull(config); - - if (config.isUnsafe()) { - this.client.sslSocketFactory(RestUtils.UNSAFE_SSL_FACTORY, - (X509TrustManager) RestUtils.UNSAFE_TRUST_MANAGER[0]); - this.client.hostnameVerifier(RestUtils.UNSAFE_HOSTNAME_VERIFIER); - } else { - X509TrustManager trustManager = RestUtils.systemDefaultTrustManager(); - SSLSocketFactory socketFactory = RestUtils.systemDefaultSslSocketFactory( - trustManager); - this.client.sslSocketFactory(socketFactory, trustManager); - this.client.hostnameVerifier(RestUtils.DEFAULT_HOSTNAME_VERIFIER); - } - return this; - } - - /** Allowed protocols. */ - public Builder protocols(List protocols) { - this.client.protocols(protocols); - return this; - } - - /** Builder to extend the HTTP client with. */ - public OkHttpClient.Builder httpClientBuilder() { - return client; - } - - public Builder headers(Headers headers) { - this.requestHeaders = headers; - return this; - } - - /** Whether to enable GZIP compression. 
*/ - public Builder gzipCompression(boolean compression) { - GzipRequestInterceptor gzip = null; - for (Interceptor interceptor : client.interceptors()) { - if (interceptor instanceof GzipRequestInterceptor) { - gzip = (GzipRequestInterceptor) interceptor; - break; - } - } - if (compression && gzip == null) { - logger.debug("Enabling GZIP compression"); - client.addInterceptor(new GzipRequestInterceptor()); - } else if (!compression && gzip != null) { - logger.debug("Disabling GZIP compression"); - client.interceptors().remove(gzip); - } - return this; - } - - /** Timeouts for connecting, reading and writing. */ - public Builder timeout(long timeout, TimeUnit unit) { - client.connectTimeout(timeout, unit) - .readTimeout(timeout, unit) - .writeTimeout(timeout, unit); - return this; - } - - /** Build a new RestClient. */ - public RestClient build() { - return new RestClient(this); - } - } - - /** Create a builder with a global shared OkHttpClient. */ - public static synchronized RestClient.Builder global() { - OkHttpClient client = globalHttpClient.get(); - if (client == null) { - client = createDefaultClient().build(); - globalHttpClient = new WeakReference<>(client); - } - return new RestClient.Builder(client); - } - - /** Create a builder with a new OkHttpClient using default settings. */ - public static synchronized RestClient.Builder newClient() { - return new RestClient.Builder(createDefaultClient()); - } - - /** - * Create a new OkHttpClient. The timeouts are set to the default. - * @return new OkHttpClient. 
- */ - private static OkHttpClient.Builder createDefaultClient() { - return new OkHttpClient.Builder() - .connectTimeout(DEFAULT_TIMEOUT, TimeUnit.SECONDS) - .readTimeout(DEFAULT_TIMEOUT, TimeUnit.SECONDS) - .writeTimeout(DEFAULT_TIMEOUT, TimeUnit.SECONDS); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/RestException.java b/radar-commons/src/main/java/org/radarbase/producer/rest/RestException.java deleted file mode 100644 index 0c8429f6..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/RestException.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright 2018 The Hyve - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import java.io.IOException; - -/** - * Exception when a HTTP REST request fails. - */ -public class RestException extends IOException { - private static final long serialVersionUID = 1; - - private final int statusCode; - private final String body; - - /** - * Request with status code and response body. - * @param statusCode HTTP status code - * @param body response body. - */ - public RestException(int statusCode, String body) { - this(statusCode, body, null); - } - - /** - * Request with status code, response body and cause. - * @param statusCode HTTP status code - * @param body response body. - * @param cause causing exception. 
- */ - public RestException(int statusCode, String body, Throwable cause) { - super("REST call failed (HTTP code " + statusCode + "): " - + body.substring(0, Math.min(512, body.length())), cause); - this.statusCode = statusCode; - this.body = body; - } - - public int getStatusCode() { - return statusCode; - } - - public String getBody() { - return body; - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/RestException.kt b/radar-commons/src/main/java/org/radarbase/producer/rest/RestException.kt new file mode 100644 index 00000000..9048105b --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/rest/RestException.kt @@ -0,0 +1,44 @@ +/* + * Copyright 2018 The Hyve + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer.rest + +import io.ktor.http.* +import java.io.IOException + +/** + * Exception when a HTTP REST request fails. + */ +class RestException +/** + * Request with status code and response body. + * @param status HTTP status code + * @param body response body. + */( + val status: HttpStatusCode, + body: String? = null, + cause: Throwable? 
= null, +) : IOException( + buildString(150) { + append("REST call failed (HTTP code ") + append(status) + if (body == null) { + append(')') + } else { + append(body.substring(0, body.length.coerceAtMost(512))) + } + }, + cause, +) diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/RestKafkaSender.kt b/radar-commons/src/main/java/org/radarbase/producer/rest/RestKafkaSender.kt new file mode 100644 index 00000000..13fd50b4 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/rest/RestKafkaSender.kt @@ -0,0 +1,264 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.radarbase.producer.rest + +import io.ktor.client.* +import io.ktor.client.call.* +import io.ktor.client.engine.cio.* +import io.ktor.client.plugins.* +import io.ktor.client.plugins.contentnegotiation.* +import io.ktor.client.request.* +import io.ktor.client.statement.* +import io.ktor.http.* +import io.ktor.util.reflect.* +import kotlinx.coroutines.* +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.flow.first +import org.apache.avro.SchemaValidationException +import org.radarbase.data.RecordData +import org.radarbase.producer.AuthenticationException +import org.radarbase.producer.KafkaSender +import org.radarbase.producer.KafkaTopicSender +import org.radarbase.producer.io.GzipContentEncoding +import org.radarbase.producer.io.UnsupportedMediaTypeException +import org.radarbase.producer.io.timeout +import org.radarbase.producer.io.unsafeSsl +import org.radarbase.producer.schema.SchemaRetriever +import org.radarbase.topic.AvroTopic +import org.radarbase.util.RadarProducerDsl +import org.slf4j.LoggerFactory +import java.io.IOException +import java.util.* +import kotlin.reflect.javaType +import kotlin.reflect.typeOf +import kotlin.time.Duration +import kotlin.time.Duration.Companion.seconds + +/** + * RestSender sends records to the Kafka REST Proxy. It does so using an Avro JSON encoding. A new + * sender must be constructed with [.sender] per AvroTopic. This implementation is + * blocking and unbuffered, so flush, clear and close do not do anything. + */ +class RestKafkaSender(config: Config) : KafkaSender { + val scope = config.scope + private val allowUnsafe: Boolean = config.allowUnsafe + private val contentType: ContentType = config.contentType + val schemaRetriever: SchemaRetriever = requireNotNull(config.schemaRetriever) { + "Missing schemaRetriever from configuration" + } + + /** Get the current REST client. 
*/ + val restClient: HttpClient + + private val _connectionState: ConnectionState = config.connectionState + ?: ConnectionState(DEFAULT_TIMEOUT, scope) + + override val connectionState: Flow + get() = _connectionState.state + + private val baseUrl: String = requireNotNull(config.baseUrl) + private val headers: Headers = config.headers.build() + private val connectionTimeout: Duration = config.connectionTimeout + private val contentEncoding = config.contentEncoding + private val originalHttpClient = config.httpClient + + /** + * Construct a RestSender. + */ + init { + restClient = config.httpClient?.config { + configure() + } ?: HttpClient(CIO) { + configure() + } + } + + private fun HttpClientConfig<*>.configure() { + timeout(connectionTimeout) + install(ContentNegotiation) { + this.register( + KAFKA_REST_BINARY_ENCODING, + AvroContentConverter(schemaRetriever, binary = true), + ) + this.register( + KAFKA_REST_JSON_ENCODING, + AvroContentConverter(schemaRetriever, binary = false), + ) + } + when (contentEncoding) { + GZIP_CONTENT_ENCODING -> install(GzipContentEncoding) + else -> {} + } + if (allowUnsafe) { + unsafeSsl() + } + defaultRequest { + url(baseUrl) + contentType(contentType) + accept(ContentType.Application.Json) + headers { + appendAll(this@RestKafkaSender.headers) + } + } + } + + inner class RestKafkaTopicSender( + override val topic: AvroTopic, + ) : KafkaTopicSender { + @OptIn(ExperimentalStdlibApi::class) + override suspend fun send(records: RecordData) = scope.async { + try { + val response: HttpResponse = restClient.post { + url("topics/${topic.name}") + val kType = typeOf>() + val reifiedType = kType.javaType + setBody(records, TypeInfo(RecordData::class, reifiedType, kType)) + } + if (response.status.isSuccess()) { + _connectionState.didConnect() + logger.debug("Added message to topic {}", topic) + } else if (response.status == HttpStatusCode.Unauthorized || response.status == HttpStatusCode.Forbidden) { + _connectionState.wasUnauthorized() + 
throw AuthenticationException("Request unauthorized") + } else if (response.status == HttpStatusCode.UnsupportedMediaType) { + throw UnsupportedMediaTypeException( + response.request.contentType(), + response.request.headers[HttpHeaders.ContentEncoding], + ) + } else { + _connectionState.didDisconnect() + throw RestException(response.status, response.bodyAsText()) + } + } catch (ex: IOException) { + _connectionState.didDisconnect() + throw ex + } + }.await() + } + + @Throws(SchemaValidationException::class) + override fun sender(topic: AvroTopic): KafkaTopicSender { + return RestKafkaTopicSender(topic) + } + + @Throws(AuthenticationException::class) + override suspend fun resetConnection(): Boolean { + if (connectionState.first() === ConnectionState.State.CONNECTED) { + return true + } + val lastState = try { + val response = withContext(Dispatchers.IO) { + restClient.head { + url("") + } + } + if (response.status.isSuccess()) { + _connectionState.didConnect() + ConnectionState.State.CONNECTED + } else if (response.status == HttpStatusCode.Unauthorized) { + _connectionState.wasUnauthorized() + throw AuthenticationException("HEAD request unauthorized") + } else { + _connectionState.didDisconnect() + val bodyString = response.bodyAsText() + logger.warn( + "Failed to make heartbeat request to {} (HTTP status code {}): {}", + restClient, + response.status, + bodyString, + ) + ConnectionState.State.DISCONNECTED + } + } catch (ex: IOException) { + // no stack trace is needed + _connectionState.didDisconnect() + logger.warn("Failed to make heartbeat request to {}: {}", restClient, ex.toString()) + ConnectionState.State.DISCONNECTED + } + return lastState === ConnectionState.State.CONNECTED + } + + fun config(config: Config.() -> Unit): RestKafkaSender { + val oldConfig = toConfig() + val newConfig = toConfig().apply(config) + return if (oldConfig == newConfig) this else RestKafkaSender(newConfig) + } + + private fun toConfig() = Config().apply { + scope = 
this@RestKafkaSender.scope + baseUrl = this@RestKafkaSender.baseUrl + httpClient = this@RestKafkaSender.originalHttpClient + schemaRetriever = this@RestKafkaSender.schemaRetriever + headers = HeadersBuilder().apply { appendAll(this@RestKafkaSender.headers) } + contentType = this@RestKafkaSender.contentType + contentEncoding = this@RestKafkaSender.contentEncoding + connectionTimeout = this@RestKafkaSender.connectionTimeout + allowUnsafe = this@RestKafkaSender.allowUnsafe + } + + @RadarProducerDsl + class Config { + var scope: CoroutineScope = CoroutineScope(Dispatchers.IO + SupervisorJob()) + var baseUrl: String? = null + var schemaRetriever: SchemaRetriever? = null + var connectionState: ConnectionState? = null + var httpClient: HttpClient? = null + var headers = HeadersBuilder() + var connectionTimeout: Duration = 30.seconds + var contentEncoding: String? = null + var allowUnsafe: Boolean = false + var contentType: ContentType = KAFKA_REST_JSON_ENCODING + + fun httpClient(config: HttpClientConfig<*>.() -> Unit = {}) { + httpClient = httpClient?.config(config) + ?: HttpClient(CIO, config) + } + + fun schemaRetriever(schemaBaseUrl: String, builder: SchemaRetriever.Config.() -> Unit = {}) { + schemaRetriever = SchemaRetriever.schemaRetriever(schemaBaseUrl) { + httpClient = this@Config.httpClient + builder() + } + } + + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (other == null || javaClass != other.javaClass) return false + other as Config + return schemaRetriever == other.schemaRetriever && + connectionState == other.connectionState && + headers.build() == other.headers.build() && + httpClient == other.httpClient && + contentType == other.contentType && + baseUrl == other.baseUrl && + connectionTimeout == other.connectionTimeout && + contentEncoding == other.contentEncoding && + scope == other.scope + } + override fun hashCode(): Int = headers.hashCode() + } + + companion object { + private val logger = 
LoggerFactory.getLogger(RestKafkaSender::class.java) + val DEFAULT_TIMEOUT: Duration = 20.seconds + val KAFKA_REST_BINARY_ENCODING = ContentType("application", "vnd.radarbase.avro.v1+binary") + val KAFKA_REST_JSON_ENCODING = ContentType("application", "vnd.kafka+json") + const val GZIP_CONTENT_ENCODING = "gzip" + + fun restKafkaSender(builder: Config.() -> Unit): RestKafkaSender = + RestKafkaSender(Config().apply(builder)) + } +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/RestSender.java b/radar-commons/src/main/java/org/radarbase/producer/rest/RestSender.java deleted file mode 100644 index 59556f90..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/RestSender.java +++ /dev/null @@ -1,333 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.producer.rest; - -import static org.radarbase.producer.rest.RestClient.DEFAULT_TIMEOUT; -import static org.radarbase.producer.rest.RestClient.responseBody; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.util.Collections; -import java.util.Objects; -import java.util.concurrent.TimeUnit; -import okhttp3.Headers; -import okhttp3.MediaType; -import okhttp3.Protocol; -import okhttp3.Request; -import okhttp3.Response; -import org.apache.avro.SchemaValidationException; -import org.radarbase.config.ServerConfig; -import org.radarbase.producer.AuthenticationException; -import org.radarbase.producer.KafkaSender; -import org.radarbase.producer.KafkaTopicSender; -import org.radarbase.producer.rest.ConnectionState.State; -import org.radarbase.topic.AvroTopic; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * RestSender sends records to the Kafka REST Proxy. It does so using an Avro JSON encoding. A new - * sender must be constructed with {@link #sender(AvroTopic)} per AvroTopic. This implementation is - * blocking and unbuffered, so flush, clear and close do not do anything. 
- */ -public class RestSender implements KafkaSender { - private static final Logger logger = LoggerFactory.getLogger(RestSender.class); - - public static final String KAFKA_REST_ACCEPT_ENCODING = - "application/vnd.kafka.v2+json, application/vnd.kafka+json, application/json"; - public static final String KAFKA_REST_ACCEPT_LEGACY_ENCODING = - "application/vnd.kafka.v1+json, application/vnd.kafka+json, application/json"; - public static final MediaType KAFKA_REST_BINARY_ENCODING = - MediaType.parse("application/vnd.radarbase.avro.v1+binary"); - public static final MediaType KAFKA_REST_AVRO_ENCODING = - MediaType.parse("application/vnd.kafka.avro.v2+json; charset=utf-8"); - public static final MediaType KAFKA_REST_AVRO_LEGACY_ENCODING = - MediaType.parse("application/vnd.kafka.avro.v1+json; charset=utf-8"); - private RequestProperties requestProperties; - - private Request.Builder connectionTestRequest; - private SchemaRetriever schemaRetriever; - private RestClient httpClient; - private final ConnectionState state; - - /** - * Construct a RestSender. - */ - private RestSender(Builder builder) { - this.schemaRetriever = Objects.requireNonNull(builder.retriever); - this.requestProperties = new RequestProperties( - KAFKA_REST_ACCEPT_ENCODING, - builder.binary ? KAFKA_REST_BINARY_ENCODING : KAFKA_REST_AVRO_ENCODING, - builder.additionalHeaders.build(), - builder.binary); - this.state = builder.state; - setRestClient(Objects.requireNonNull(builder.client).newBuilder() - .protocols(Collections.singletonList(Protocol.HTTP_1_1)) - .build()); - } - - /** - * Set the connection timeout. This affects both the connection state as the HTTP client - * setting. 
- * @param connectionTimeout timeout - * @param unit time unit - */ - public synchronized void setConnectionTimeout(long connectionTimeout, TimeUnit unit) { - if (connectionTimeout != httpClient.getTimeout()) { - httpClient = httpClient.newBuilder().timeout(connectionTimeout, unit).build(); - state.setTimeout(connectionTimeout, unit); - } - } - - /** - * Set the Kafka REST Proxy settings. This affects the REST client. - * @param kafkaConfig server configuration of the Kafka REST proxy. - */ - public synchronized void setKafkaConfig(ServerConfig kafkaConfig) { - Objects.requireNonNull(kafkaConfig); - if (kafkaConfig.equals(httpClient.getServer())) { - return; - } - setRestClient(httpClient.newBuilder().server(kafkaConfig).build()); - } - - /** - * Set the REST client. This will reset the connection state. - */ - private void setRestClient(RestClient newClient) { - try { - connectionTestRequest = newClient.requestBuilder("").head(); - } catch (MalformedURLException ex) { - throw new IllegalArgumentException("Schemaless topics do not have a valid URL", ex); - } - httpClient = newClient; - state.reset(); - } - - /** Set the schema retriever. */ - public final synchronized void setSchemaRetriever(SchemaRetriever retriever) { - this.schemaRetriever = retriever; - } - - /** Get the current REST client. */ - public synchronized RestClient getRestClient() { - return httpClient; - } - - /** Get the schema retriever. */ - public synchronized SchemaRetriever getSchemaRetriever() { - return this.schemaRetriever; - } - - /** Get a request to check the connection status. */ - private synchronized Request getConnectionTestRequest() { - return connectionTestRequest.headers(requestProperties.headers).build(); - } - - /** Set the compression of the REST client. */ - public synchronized void setCompression(boolean useCompression) { - httpClient = httpClient.newBuilder().gzipCompression(useCompression).build(); - } - - /** Get the headers used in requests. 
*/ - public synchronized Headers getHeaders() { - return requestProperties.headers; - } - - /** Set the headers used in requests. */ - public synchronized void setHeaders(Headers additionalHeaders) { - this.requestProperties = new RequestProperties(requestProperties.acceptType, - requestProperties.contentType, additionalHeaders, - requestProperties.binary); - this.state.reset(); - } - - @Override - public KafkaTopicSender sender(AvroTopic topic) - throws SchemaValidationException { - return new RestTopicSender<>(topic, this, state); - } - - /** - * Get the current request properties. - */ - public synchronized RequestProperties getRequestProperties() { - return requestProperties; - } - - /** - * Get the current request context. - */ - public synchronized RequestContext getRequestContext() { - return new RequestContext(httpClient, requestProperties); - } - - @Override - public boolean resetConnection() throws AuthenticationException { - if (state.getState() == State.CONNECTED) { - return true; - } - try (Response response = httpClient.request(getConnectionTestRequest())) { - if (response.isSuccessful()) { - state.didConnect(); - } else if (response.code() == 401) { - state.wasUnauthorized(); - } else { - state.didDisconnect(); - String bodyString = responseBody(response); - logger.warn("Failed to make heartbeat request to {} (HTTP status code {}): {}", - httpClient, response.code(), bodyString); - } - } catch (IOException ex) { - // no stack trace is needed - state.didDisconnect(); - logger.warn("Failed to make heartbeat request to {}: {}", httpClient, ex.toString()); - } - - if (state.getState() == State.UNAUTHORIZED) { - throw new AuthenticationException("HEAD request unauthorized"); - } - - return state.getState() == State.CONNECTED; - } - - @Override - public boolean isConnected() throws AuthenticationException { - switch (state.getState()) { - case CONNECTED: - return true; - case DISCONNECTED: - return false; - case UNAUTHORIZED: - throw new 
AuthenticationException("Unauthorized"); - case UNKNOWN: - return resetConnection(); - default: - throw new IllegalStateException("Illegal connection state"); - } - } - - @Override - public void close() { - // noop - } - - /** - * Revert to a legacy connection if the server does not support the latest protocols. - * @param acceptEncoding accept encoding to use in the legacy connection. - * @param contentEncoding content encoding to use in the legacy connection. - * @param binary whether to send the data as binary. - */ - public synchronized void useLegacyEncoding(String acceptEncoding, - MediaType contentEncoding, boolean binary) { - logger.debug("Reverting to encoding {} -> {} (binary: {})", - contentEncoding, acceptEncoding, binary); - this.requestProperties = new RequestProperties(acceptEncoding, - contentEncoding, - requestProperties.headers, binary); - } - - public static class Builder { - private SchemaRetriever retriever; - private ConnectionState state; - private RestClient client; - private Headers.Builder additionalHeaders = new Headers.Builder(); - private boolean binary = false; - - public Builder schemaRetriever(SchemaRetriever schemaRetriever) { - this.retriever = schemaRetriever; - return this; - } - - /** - * Whether to try to send binary content. This only works if the server supports it. If not, - * there may be an additional round-trip. - * @param binary true if attempt to send binary content, false otherwise - */ - public Builder useBinaryContent(boolean binary) { - this.binary = binary; - return this; - } - - /** - * Whether to try to send binary content. This only works if the server supports it. If not, - * there may be an additional round-trip. 
- * @param binary true if attempt to send binary content, false otherwise - * @deprecated use {@link #useBinaryContent(boolean)} instead - */ - @Deprecated - @SuppressWarnings("PMD.LinguisticNaming") - public Builder hasBinaryContent(boolean binary) { - this.binary = binary; - return this; - } - - public Builder connectionState(ConnectionState state) { - this.state = state; - return this; - } - - public Builder httpClient(RestClient client) { - this.client = client; - return this; - } - - public Builder headers(Headers headers) { - additionalHeaders = headers.newBuilder(); - return this; - } - - public Builder addHeader(String header, String value) { - additionalHeaders.add(header + ": " + value); - return this; - } - - /** Build a new RestSender. */ - public RestSender build() { - if (state == null) { - state = new ConnectionState(DEFAULT_TIMEOUT, TimeUnit.SECONDS); - } - - return new RestSender(this); - } - } - - static final class RequestContext { - final RequestProperties properties; - final RestClient client; - - RequestContext(RestClient client, RequestProperties properties) { - this.properties = properties; - this.client = client; - } - } - - static final class RequestProperties { - final String acceptType; - final MediaType contentType; - final Headers headers; - final boolean binary; - - RequestProperties(String acceptType, MediaType contentType, Headers headers, - boolean binary) { - this.acceptType = acceptType; - this.contentType = contentType; - this.headers = headers; - this.binary = binary; - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/RestTopicSender.java b/radar-commons/src/main/java/org/radarbase/producer/rest/RestTopicSender.java deleted file mode 100644 index 147ce268..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/RestTopicSender.java +++ /dev/null @@ -1,209 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the 
"License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import static org.radarbase.producer.rest.UncheckedRequestException.fail; - -import java.io.IOException; -import java.util.Collections; -import java.util.Objects; -import okhttp3.HttpUrl; -import okhttp3.MediaType; -import okhttp3.Request; -import okhttp3.RequestBody; -import okhttp3.Response; -import org.apache.avro.SchemaValidationException; -import org.json.JSONException; -import org.radarbase.data.AvroRecordData; -import org.radarbase.data.RecordData; -import org.radarbase.producer.AuthenticationException; -import org.radarbase.producer.KafkaTopicSender; -import org.radarbase.topic.AvroTopic; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -class RestTopicSender - implements KafkaTopicSender { - private static final Logger logger = LoggerFactory.getLogger(RestTopicSender.class); - - private final AvroTopic topic; - private RecordRequest requestData; - private final RestSender sender; - private final ConnectionState state; - - RestTopicSender(AvroTopic topic, RestSender sender, ConnectionState state) - throws SchemaValidationException { - this.topic = topic; - this.sender = sender; - this.state = state; - - if (sender.getRequestContext().properties.binary) { - try { - requestData = new BinaryRecordRequest<>(topic); - } catch (IllegalArgumentException ex) { - logger.warn("Cannot use Binary encoding for incompatible topic {}: {}", - topic, ex.toString()); - } - } - - if (requestData == null) { - requestData 
= new JsonRecordRequest<>(topic); - } - } - - @Override - public void send(K key, V value) throws IOException, SchemaValidationException { - send(new AvroRecordData<>(topic, key, Collections.singletonList(value))); - } - - /** - * Actually make a REST request to the Kafka REST server and Schema Registry. - * - * @param records values to send - * @throws IOException if records could not be sent - */ - @Override - public void send(RecordData records) throws IOException, SchemaValidationException { - RestSender.RequestContext context = sender.getRequestContext(); - Request request = buildRequest(context, records); - - boolean doResend = false; - try (Response response = context.client.request(request)) { - if (response.isSuccessful()) { - state.didConnect(); - logger.debug("Added message to topic {}", topic); - } else if (response.code() == 401 || response.code() == 403) { - state.wasUnauthorized(); - } else if (response.code() == 415) { - downgradeConnection(request, response); - doResend = true; - } else { - throw fail(request, response, null); - } - } catch (IOException ex) { - state.didDisconnect(); - fail(request, null, ex).rethrow(); - } catch (UncheckedRequestException ex) { - state.didDisconnect(); - ex.rethrow(); - } finally { - requestData.reset(); - } - - if (state.getState() == ConnectionState.State.UNAUTHORIZED) { - throw new AuthenticationException("Request unauthorized"); - } - - if (doResend) { - send(records); - } - } - - private void updateRecords(RestSender.RequestContext context, RecordData records) - throws IOException, SchemaValidationException { - if (!context.properties.binary && requestData instanceof BinaryRecordRequest) { - requestData = new JsonRecordRequest<>(topic); - } - - String sendTopic = topic.getName(); - SchemaRetriever retriever = sender.getSchemaRetriever(); - - ParsedSchemaMetadata keyMetadata; - ParsedSchemaMetadata valueMetadata; - - try { - keyMetadata = retriever.getOrSetSchemaMetadata( - sendTopic, false, 
topic.getKeySchema(), -1); - valueMetadata = retriever.getOrSetSchemaMetadata( - sendTopic, true, topic.getValueSchema(), -1); - } catch (JSONException | IOException ex) { - throw new IOException("Failed to get schemas for topic " + topic, ex); - } - - requestData.prepare(keyMetadata, valueMetadata, records); - } - - private void downgradeConnection(Request request, Response response) throws IOException { - if (this.requestData instanceof BinaryRecordRequest) { - state.didConnect(); - logger.warn("Binary Avro encoding is not supported." - + " Switching to JSON encoding."); - sender.useLegacyEncoding( - RestSender.KAFKA_REST_ACCEPT_ENCODING, RestSender.KAFKA_REST_AVRO_ENCODING, - false); - requestData = new JsonRecordRequest<>(topic); - } else if (Objects.equals(request.header("Accept"), - RestSender.KAFKA_REST_ACCEPT_ENCODING)) { - state.didConnect(); - logger.warn("Latest Avro encoding is not supported. Switching to legacy " - + "encoding."); - sender.useLegacyEncoding( - RestSender.KAFKA_REST_ACCEPT_LEGACY_ENCODING, - RestSender.KAFKA_REST_AVRO_LEGACY_ENCODING, - false); - } else { - RequestBody body = request.body(); - MediaType contentType = body != null ? 
body.contentType() : null; - if (contentType == null - || contentType.equals(RestSender.KAFKA_REST_AVRO_LEGACY_ENCODING)) { - throw fail(request, response, - new IOException("Content-Type " + contentType + " not accepted by server.")); - } else { - // the connection may have been downgraded already - state.didConnect(); - logger.warn("Content-Type changed during request"); - } - } - } - - private Request buildRequest(RestSender.RequestContext context, RecordData records) - throws IOException, SchemaValidationException { - updateRecords(context, records); - - HttpUrl sendToUrl = context.client.getRelativeUrl("topics/" + topic.getName()); - - TopicRequestBody requestBody; - Request.Builder requestBuilder = new Request.Builder() - .url(sendToUrl) - .headers(context.properties.headers) - .header("Accept", context.properties.acceptType); - - MediaType contentType = context.properties.contentType; - if (contentType.equals(RestSender.KAFKA_REST_BINARY_ENCODING) - && !(requestData instanceof BinaryRecordRequest)) { - contentType = RestSender.KAFKA_REST_AVRO_ENCODING; - } - requestBody = new TopicRequestBody(requestData, contentType); - - return requestBuilder.post(requestBody).build(); - } - - @Override - public void clear() { - // nothing - } - - @Override - public void flush() { - // nothing - } - - @Override - public void close() { - // noop - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRestClient.java b/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRestClient.java deleted file mode 100644 index ae1a3343..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRestClient.java +++ /dev/null @@ -1,117 +0,0 @@ -package org.radarbase.producer.rest; - -import java.io.IOException; -import okhttp3.MediaType; -import okhttp3.Request; -import okhttp3.RequestBody; -import okio.BufferedSink; -import org.apache.avro.Schema; -import org.json.JSONException; -import org.json.JSONObject; -import 
org.radarbase.util.Strings; - -/** REST client for Confluent schema registry. */ -public class SchemaRestClient { - private final RestClient client; - - public SchemaRestClient(RestClient client) { - this.client = client; - } - - /** Retrieve schema metadata from server. */ - public ParsedSchemaMetadata retrieveSchemaMetadata(String subject, int version) - throws JSONException, IOException { - boolean isLatest = version <= 0; - - StringBuilder pathBuilder = new StringBuilder(50) - .append("/subjects/") - .append(subject) - .append("/versions/"); - - if (isLatest) { - pathBuilder.append("latest"); - } else { - pathBuilder.append(version); - } - - JSONObject node = requestJson(pathBuilder.toString()); - int newVersion = isLatest ? node.getInt("version") : version; - int schemaId = node.getInt("id"); - Schema schema = parseSchema(node.getString("schema")); - return new ParsedSchemaMetadata(schemaId, newVersion, schema); - } - - private JSONObject requestJson(String path) throws IOException { - Request request = client.requestBuilder(path) - .addHeader("Accept", "application/json") - .build(); - - String response = client.requestString(request); - return new JSONObject(response); - } - - - /** Parse a schema from string. */ - public Schema parseSchema(String schemaString) { - Schema.Parser parser = new Schema.Parser(); - return parser.parse(schemaString); - } - - /** Add a schema to a subject. */ - public int addSchema(String subject, Schema schema) throws IOException { - Request request = client.requestBuilder("/subjects/" + subject + "/versions") - .addHeader("Accept", "application/json") - .post(new SchemaRequestBody(schema)) - .build(); - - String response = client.requestString(request); - JSONObject node = new JSONObject(response); - return node.getInt("id"); - } - - /** Request metadata for a schema on a subject. 
*/ - public ParsedSchemaMetadata requestMetadata(String subject, Schema schema) - throws IOException { - Request request = client.requestBuilder("/subjects/" + subject) - .addHeader("Accept", "application/json") - .post(new SchemaRequestBody(schema)) - .build(); - - String response = client.requestString(request); - JSONObject node = new JSONObject(response); - - return new ParsedSchemaMetadata(node.getInt("id"), - node.getInt("version"), schema); - } - - /** Retrieve schema metadata from server. */ - public Schema retrieveSchemaById(int id) - throws JSONException, IOException { - JSONObject node = requestJson("/schemas/ids/" + id); - return parseSchema(node.getString("schema")); - } - - private static class SchemaRequestBody extends RequestBody { - private static final byte[] SCHEMA = Strings.utf8("{\"schema\":"); - private static final MediaType CONTENT_TYPE = MediaType.parse( - "application/vnd.schemaregistry.v1+json; charset=utf-8"); - - private final Schema schema; - - private SchemaRequestBody(Schema schema) { - this.schema = schema; - } - - @Override - public MediaType contentType() { - return CONTENT_TYPE; - } - - @Override - public void writeTo(BufferedSink sink) throws IOException { - sink.write(SCHEMA); - sink.writeUtf8(JSONObject.quote(schema.toString())); - sink.writeByte('}'); - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRetriever.java b/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRetriever.java deleted file mode 100644 index 074a1ac0..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/SchemaRetriever.java +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import java.io.IOException; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Objects; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.TimeUnit; -import org.apache.avro.Schema; -import org.json.JSONException; -import org.json.JSONObject; -import org.radarbase.config.ServerConfig; -import org.radarbase.util.TimedInt; -import org.radarbase.util.TimedValue; -import org.radarbase.util.TimedVariable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Retriever of an Avro Schema. Internally, only {@link JSONObject} is used to manage JSON data, - * to keep the class as lean as possible. - */ -@SuppressWarnings("PMD.GodClass") -public class SchemaRetriever { - private static final Logger logger = LoggerFactory.getLogger(SchemaRetriever.class); - private static final long MAX_VALIDITY = 86400L; - - private final ConcurrentMap> idCache = - new ConcurrentHashMap<>(); - private final ConcurrentMap schemaCache = new ConcurrentHashMap<>(); - private final ConcurrentMap> subjectVersionCache = - new ConcurrentHashMap<>(); - - private final SchemaRestClient restClient; - private final long cacheValidity; - - public SchemaRetriever(RestClient client, long cacheValidity) { - restClient = new SchemaRestClient(client); - this.cacheValidity = cacheValidity; - } - - public SchemaRetriever(RestClient client) { - this(client, MAX_VALIDITY); - } - - /** - * Schema retriever for a Confluent Schema Registry. 
- * @param config schema registry configuration. - * @param connectionTimeout timeout in seconds. - */ - public SchemaRetriever(ServerConfig config, long connectionTimeout) { - this(RestClient.global() - .server(Objects.requireNonNull(config)) - .timeout(connectionTimeout, TimeUnit.SECONDS) - .build()); - } - - /** - * Schema retriever for a Confluent Schema Registry. - * @param config schema registry configuration. - * @param connectionTimeout timeout in seconds. - * @param cacheValidity timeout in seconds for considering a schema stale. - */ - public SchemaRetriever(ServerConfig config, long connectionTimeout, long cacheValidity) { - this(RestClient.global() - .server(Objects.requireNonNull(config)) - .timeout(connectionTimeout, TimeUnit.SECONDS) - .build(), cacheValidity); - } - - /** - * Add schema metadata to the retriever. This implementation only adds it to the cache. - * @return schema ID - */ - public int addSchema(String topic, boolean ofValue, Schema schema) - throws JSONException, IOException { - String subject = subject(topic, ofValue); - int id = restClient.addSchema(subject, schema); - cache(new ParsedSchemaMetadata(id, null, schema), subject, false); - return id; - } - - /** - * Get schema metadata, and if none is found, add a new schema. - * - * @param version version to get or 0 if the latest version can be used. - */ - public ParsedSchemaMetadata getOrSetSchemaMetadata(String topic, boolean ofValue, Schema schema, - int version) throws JSONException, IOException { - try { - return getBySubjectAndVersion(topic, ofValue, version); - } catch (RestException ex) { - if (ex.getStatusCode() == 404) { - logger.warn("Schema for {} value was not yet added to the schema registry.", topic); - addSchema(topic, ofValue, schema); - return getMetadata(topic, ofValue, schema, version <= 0); - } else { - throw ex; - } - } - } - - /** Get a schema by its ID. 
*/ - public Schema getById(int id) throws IOException { - TimedValue value = idCache.get(id); - if (value == null || value.isExpired()) { - value = new TimedValue<>(restClient.retrieveSchemaById(id), cacheValidity); - idCache.put(id, value); - schemaCache.put(value.value, new TimedInt(id, cacheValidity)); - } - return value.value; - } - - /** Gets a schema by ID and check that it is present in the given topic. */ - public ParsedSchemaMetadata getBySubjectAndId(String topic, boolean ofValue, int id) - throws IOException { - Schema schema = getById(id); - String subject = subject(topic, ofValue); - ParsedSchemaMetadata metadata = getCachedVersion(subject, id, null, schema); - return metadata != null ? metadata : getMetadata(topic, ofValue, schema); - } - - /** Get schema metadata. Cached schema metadata will be used if present. */ - public ParsedSchemaMetadata getBySubjectAndVersion(String topic, boolean ofValue, int version) - throws JSONException, IOException { - String subject = subject(topic, ofValue); - ConcurrentMap versionMap = computeIfAbsent(subjectVersionCache, subject, - new ConcurrentHashMap<>()); - TimedInt id = versionMap.get(Math.max(version, 0)); - if (id == null || id.isExpired()) { - ParsedSchemaMetadata metadata = restClient.retrieveSchemaMetadata(subject, version); - cache(metadata, subject, version <= 0); - return metadata; - } else { - Schema schema = getById(id.value); - ParsedSchemaMetadata metadata = getCachedVersion(subject, id.value, version, schema); - return metadata != null ? metadata : getMetadata(topic, ofValue, schema, version <= 0); - } - } - - /** Get all schema versions in a subject. */ - public ParsedSchemaMetadata getMetadata(String topic, boolean ofValue, Schema schema) - throws IOException { - return getMetadata(topic, ofValue, schema, false); - } - - - /** Get the metadata of a specific schema in a topic. 
*/ - public ParsedSchemaMetadata getMetadata(String topic, boolean ofValue, Schema schema, - boolean ofLatestVersion) throws IOException { - TimedInt id = schemaCache.get(schema); - String subject = subject(topic, ofValue); - - if (id != null && !id.isExpired()) { - ParsedSchemaMetadata metadata = getCachedVersion(subject, id.value, null, schema); - if (metadata != null) { - return metadata; - } - } - - ParsedSchemaMetadata metadata = restClient.requestMetadata(subject, schema); - cache(metadata, subject, ofLatestVersion); - return metadata; - } - - - /** - * Get cached metadata. - * @param subject schema registry subject - * @param id schema ID. - * @param reportedVersion version requested by the client. Null if no version was requested. - * This version will be used if the actual version was not cached. - * @param schema schema to use. - * @return metadata if present. Returns null if no metadata is cached or if no version is cached - * and the reportedVersion is null. - */ - protected ParsedSchemaMetadata getCachedVersion(String subject, int id, - Integer reportedVersion, Schema schema) { - Integer version = reportedVersion; - if (version == null || version <= 0) { - ConcurrentMap versions = subjectVersionCache.get(subject); - version = findCachedVersion(id, versions); - if (version == null || version <= 0) { - return null; - } - } - return new ParsedSchemaMetadata(id, version, schema); - } - - private Integer findCachedVersion(int id, ConcurrentMap cache) { - if (cache == null) { - return null; - } - for (Map.Entry entry : cache.entrySet()) { - if (!entry.getValue().isExpired() - && entry.getKey() != 0 - && entry.getValue().value == id) { - return entry.getKey(); - } - } - return null; - } - - protected void cache(ParsedSchemaMetadata metadata, String subject, boolean latest) { - TimedInt id = new TimedInt(metadata.getId(), cacheValidity); - schemaCache.put(metadata.getSchema(), id); - if (metadata.getVersion() != null) { - ConcurrentMap versionCache = 
computeIfAbsent(subjectVersionCache, - subject, new ConcurrentHashMap<>()); - - versionCache.put(metadata.getVersion(), id); - if (latest) { - versionCache.put(0, id); - } - } - idCache.put(metadata.getId(), new TimedValue<>(metadata.getSchema(), cacheValidity)); - } - - /** - * Remove expired entries from cache. - */ - public void pruneCache() { - prune(schemaCache); - prune(idCache); - for (ConcurrentMap versionMap : subjectVersionCache.values()) { - prune(versionMap); - } - } - - /** - * Remove all entries from cache. - */ - public void clearCache() { - subjectVersionCache.clear(); - idCache.clear(); - schemaCache.clear(); - } - - /** The subject in the Avro Schema Registry, given a Kafka topic. */ - protected static String subject(String topic, boolean ofValue) { - return topic + (ofValue ? "-value" : "-key"); - } - - private static void prune(Map map) { - for (Entry entry : map.entrySet()) { - if (entry.getValue().isExpired()) { - map.remove(entry.getKey(), entry.getValue()); - } - } - } - - private static V computeIfAbsent(ConcurrentMap original, K key, V newValue) { - V existingValue = original.putIfAbsent(key, newValue); - return existingValue != null ? existingValue : newValue; - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/TopicRequestBody.java b/radar-commons/src/main/java/org/radarbase/producer/rest/TopicRequestBody.java deleted file mode 100644 index 353afb9c..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/TopicRequestBody.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import java.io.IOException; -import okhttp3.MediaType; -import okhttp3.Request; -import okhttp3.RequestBody; -import okio.BufferedSink; - -/** - * TopicRequestData in a RequestBody. - */ -class TopicRequestBody extends RequestBody { - protected final RecordRequest data; - private final MediaType mediaType; - - TopicRequestBody(RecordRequest requestData, MediaType mediaType) { - this.data = requestData; - this.mediaType = mediaType; - } - - @Override - public MediaType contentType() { - return mediaType; - } - - @Override - public void writeTo(BufferedSink sink) throws IOException { - data.writeToSink(sink); - } - - static String topicRequestContent(Request request, int maxLength) throws IOException { - TopicRequestBody body = (TopicRequestBody) request.body(); - if (body == null) { - return null; - } - return body.data.content(maxLength); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/rest/UncheckedRequestException.java b/radar-commons/src/main/java/org/radarbase/producer/rest/UncheckedRequestException.java deleted file mode 100644 index a1524427..00000000 --- a/radar-commons/src/main/java/org/radarbase/producer/rest/UncheckedRequestException.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2018 The Hyve - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import java.io.IOException; -import okhttp3.Request; -import okhttp3.Response; - -/** Unchecked exception for failures during request handling. */ -public class UncheckedRequestException extends RuntimeException { - private static final long serialVersionUID = 1; - private static final int LOG_CONTENT_LENGTH = 1024; - - /** - * Unchecked exception. - * @param message exception message. - * @param cause cause of this exception, may be null - */ - public UncheckedRequestException(String message, IOException cause) { - super(message, cause); - } - - /** - * Rethrow this exception using either its cause, if that is an IOException, or using - * the current exception. - * @throws IOException if the cause of the exception was an IOException. - * @throws UncheckedRequestException if the cause of the exception was not an IOException. - */ - public void rethrow() throws IOException { - if (getCause() instanceof IOException) { - throw (IOException)getCause(); - } else { - throw new IOException(this); - } - } - - /** - * Create a new UncheckedRequestException based on given call. - * - * @param request call request - * @param response call response, may be null - * @param cause exception cause, may be null - * @return new exception - * @throws IOException if the request or response cannot be constructed into a message. 
- */ - public static UncheckedRequestException fail(Request request, - Response response, IOException cause) throws IOException { - - StringBuilder message = new StringBuilder(128); - message.append("FAILED to transmit message"); - String content; - if (response != null) { - message.append(" (HTTP status code ") - .append(response.code()) - .append(')'); - content = RestClient.responseBody(response); - } else { - content = null; - } - - String requestContent = TopicRequestBody.topicRequestContent(request, LOG_CONTENT_LENGTH); - if (requestContent != null || content != null) { - message.append(':'); - } - - if (requestContent != null) { - message.append("\n ") - .append(requestContent); - } - - if (content != null) { - message.append("\n ") - .append(content); - } - - return new UncheckedRequestException(message.toString(), cause); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/producer/schema/ParsedSchemaMetadata.kt b/radar-commons/src/main/java/org/radarbase/producer/schema/ParsedSchemaMetadata.kt new file mode 100644 index 00000000..64ff333c --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/schema/ParsedSchemaMetadata.kt @@ -0,0 +1,33 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer.schema + +import org.apache.avro.Schema + +/** + * Parsed schema metadata from a Schema Registry. 
+ */ +data class ParsedSchemaMetadata +/** + * Schema metadata. + * @param id schema ID, may be null. + * @param version schema version, may be null. + * @param schema parsed schema. + */( + val id: Int, + val version: Int?, + val schema: Schema, +) diff --git a/radar-commons/src/main/java/org/radarbase/producer/schema/SchemaMetadata.kt b/radar-commons/src/main/java/org/radarbase/producer/schema/SchemaMetadata.kt new file mode 100644 index 00000000..2eb8096a --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/schema/SchemaMetadata.kt @@ -0,0 +1,43 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer.schema + +import kotlinx.serialization.Serializable +import org.apache.avro.Schema + +/** + * Parsed schema metadata from a Schema Registry. + */ +@Serializable +data class SchemaMetadata +/** + * Schema metadata. + * @param id schema ID, may be null. + * @param version schema version, may be null. + * @param schema parsed schema. + */( + val id: Int? = null, + val version: Int? = null, + val schema: String? = null, +) { + fun toParsedSchemaMetadata(defaultId: Int? 
= null) = ParsedSchemaMetadata( + id = checkNotNull(id ?: defaultId) { "Need id to parse schema metadata" }, + version = version, + schema = Schema.Parser().parse( + checkNotNull(schema) { "Need schema to parse it" }, + ), + ) +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/schema/SchemaRestClient.kt b/radar-commons/src/main/java/org/radarbase/producer/schema/SchemaRestClient.kt new file mode 100644 index 00000000..50746c2b --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/schema/SchemaRestClient.kt @@ -0,0 +1,128 @@ +package org.radarbase.producer.schema + +import io.ktor.client.* +import io.ktor.client.call.* +import io.ktor.client.plugins.* +import io.ktor.client.plugins.contentnegotiation.* +import io.ktor.client.request.* +import io.ktor.client.statement.* +import io.ktor.http.* +import io.ktor.serialization.kotlinx.json.* +import io.ktor.util.reflect.* +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.withContext +import kotlinx.serialization.json.Json +import org.apache.avro.Schema +import org.radarbase.producer.rest.RestException +import java.io.IOException +import kotlin.coroutines.CoroutineContext + +/** REST client for Confluent schema registry. 
*/ +class SchemaRestClient( + httpClient: HttpClient, + baseUrl: String, + private val ioContext: CoroutineContext = Dispatchers.IO, +) { + private val httpClient: HttpClient = httpClient.config { + install(ContentNegotiation) { + json( + Json { + ignoreUnknownKeys = true + coerceInputValues = true + }, + ) + } + defaultRequest { + url(baseUrl) + accept(ContentType.Application.Json) + } + } + + suspend inline fun request( + noinline requestBuilder: HttpRequestBuilder.() -> Unit, + ): T = request(typeInfo(), requestBuilder) + + suspend fun request( + typeInfo: TypeInfo, + requestBuilder: HttpRequestBuilder.() -> Unit, + ): T = withContext(ioContext) { + val response = httpClient.request { + requestBuilder() + } + if (!response.status.isSuccess()) { + throw RestException(response.status, response.bodyAsText()) + } + response.body(typeInfo) + } + + suspend fun requestEmpty( + requestBuilder: HttpRequestBuilder.() -> Unit, + ) = withContext(ioContext) { + val response = httpClient.request { + requestBuilder() + } + if (!response.status.isSuccess()) { + throw RestException(response.status, response.bodyAsText()) + } + } + + /** Retrieve schema metadata from server. */ + @Throws(IOException::class) + suspend fun retrieveSchemaMetadata( + subject: String, + version: Int, + ): ParsedSchemaMetadata { + val isLatest = version <= 0 + val versionPath = if (isLatest) "latest" else version + return schemaGet("subjects/$subject/versions/$versionPath") + .toParsedSchemaMetadata() + } + + @Throws(IOException::class) + suspend fun schemaGet(path: String): SchemaMetadata = request { + method = HttpMethod.Get + url(path) + } + + @Throws(IOException::class) + suspend fun schemaPost( + path: String, + schema: Schema, + ): SchemaMetadata = request { + method = HttpMethod.Post + url(path) + contentType(ContentType.Application.Json) + setBody(SchemaMetadata(schema = schema.toString())) + } + + /** Add a schema to a subject. 
*/ + @Throws(IOException::class) + suspend fun addSchema(subject: String, schema: Schema): ParsedSchemaMetadata { + val result = schemaPost("subjects/$subject/versions", schema) + return ParsedSchemaMetadata( + id = checkNotNull(result.id) { "Missing schema ID in request result" }, + version = result.version, + schema = schema, + ) + } + + /** Request metadata for a schema on a subject. */ + @Throws(IOException::class) + suspend fun requestMetadata( + subject: String, + schema: Schema, + ): ParsedSchemaMetadata { + val result = schemaPost("subjects/$subject", schema) + return ParsedSchemaMetadata( + id = checkNotNull(result.id) { "Missing schema ID in request result" }, + version = result.version, + schema = schema, + ) + } + + /** Retrieve schema metadata from server. */ + suspend fun retrieveSchemaById(id: Int): Schema = + schemaGet("/schemas/ids/$id") + .toParsedSchemaMetadata(id) + .schema +} diff --git a/radar-commons/src/main/java/org/radarbase/producer/schema/SchemaRetriever.kt b/radar-commons/src/main/java/org/radarbase/producer/schema/SchemaRetriever.kt new file mode 100644 index 00000000..b3c0de8d --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/producer/schema/SchemaRetriever.kt @@ -0,0 +1,277 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.radarbase.producer.schema + +import io.ktor.client.* +import io.ktor.client.engine.cio.* +import io.ktor.client.plugins.* +import io.ktor.client.request.* +import io.ktor.http.* +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.coroutineScope +import kotlinx.coroutines.launch +import org.apache.avro.Schema +import org.radarbase.kotlin.coroutines.CacheConfig +import org.radarbase.kotlin.coroutines.CachedValue +import org.radarbase.util.RadarProducerDsl +import java.io.IOException +import java.lang.ref.SoftReference +import java.util.* +import java.util.Objects.hash +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.ConcurrentMap +import kotlin.coroutines.CoroutineContext +import kotlin.time.Duration.Companion.days +import kotlin.time.Duration.Companion.minutes + +typealias VersionCache = ConcurrentMap> + +/** + * Retriever of an Avro Schema. + */ +open class SchemaRetriever(config: Config) { + private val idCache: ConcurrentMap> = ConcurrentHashMap() + private val schemaCache: ConcurrentMap> = ConcurrentHashMap() + private val subjectVersionCache: ConcurrentMap = ConcurrentHashMap() + + private val baseUrl = config.baseUrl + private val ioContext = config.ioContext + private val httpClient = requireNotNull(config.httpClient) { "Missing HTTP client" } + + val restClient: SchemaRestClient = SchemaRestClient(httpClient, baseUrl, ioContext) + + private val schemaTimeout = config.schemaTimeout + + /** + * Add schema metadata to the retriever. This implementation only adds it to the cache. 
+ * @return schema ID + */ + @Throws(IOException::class) + suspend fun addSchema(topic: String, ofValue: Boolean, schema: Schema): Int = coroutineScope { + val subject = subject(topic, ofValue) + val metadata = restClient.addSchema(subject, schema) + + launch { + cachedMetadata(subject, metadata.schema).set(metadata) + } + if (metadata.version != null) { + launch { + cachedVersion(subject, metadata.version).set(metadata) + } + } + metadata.id + } + + /** Get schema metadata. Cached schema metadata will be used if present. */ + @Throws(IOException::class) + open suspend fun getByVersion( + topic: String, + ofValue: Boolean, + version: Int, + ): ParsedSchemaMetadata { + val subject = subject(topic, ofValue) + val versionMap = subjectVersionCache.computeIfAbsent( + subject, + ::ConcurrentHashMap, + ) + val metadata = versionMap.cachedVersion(subject, version).get() + if (version <= 0 && metadata.version != null) { + versionMap.cachedVersion(subject, metadata.version).set(metadata) + } + return metadata + } + + /** Get schema metadata. Cached schema metadata will be used if present. */ + @Throws(IOException::class) + open suspend fun getById( + topic: String, + ofValue: Boolean, + id: Int, + ): ParsedSchemaMetadata { + val subject = subject(topic, ofValue) + val schema = idCache[id]?.get() + ?: restClient.retrieveSchemaById(id) + + return cachedMetadata(subject, schema).get() + } + + /** Get the metadata of a specific schema in a topic. 
*/ + @Throws(IOException::class) + open suspend fun metadata( + topic: String, + ofValue: Boolean, + schema: Schema, + ): ParsedSchemaMetadata { + val subject = subject(topic, ofValue) + return cachedMetadata(subject, schema).get() + } + + private fun cachedMetadata( + subject: String, + schema: Schema, + ): CachedValue = schemaCache.computeIfAbsent(schema) { + CachedValue(schemaTimeout) { + val metadata = restClient.requestMetadata(subject, schema) + if (metadata.version != null) { + cachedVersion(subject, metadata.version).set(metadata) + } + idCache[metadata.id] = SoftReference(metadata.schema) + metadata + } + } + + private suspend fun cachedVersion( + subject: String, + version: Int, + ): CachedValue = subjectVersionCache + .computeIfAbsent( + subject, + ::ConcurrentHashMap, + ) + .cachedVersion(subject, version) + + private suspend fun VersionCache.cachedVersion( + subject: String, + version: Int, + ): CachedValue { + val useVersion = version.coerceAtLeast(0) + val versionId = computeIfAbsent(useVersion) { + CachedValue(schemaTimeout) { + val metadata = restClient.retrieveSchemaMetadata(subject, version) + cachedMetadata(subject, metadata.schema).set(metadata) + idCache[metadata.id] = SoftReference(metadata.schema) + metadata + } + } + return versionId + } + + private suspend fun MutableCollection>.prune() { + val iter = iterator() + while (iter.hasNext()) { + val staleValue = iter.next().getFromCache() + ?: continue + + if ( + staleValue is CachedValue.CacheError || + ( + staleValue is CachedValue.CacheValue && + staleValue.isExpired(schemaTimeout.refreshDuration) + ) + ) { + iter.remove() + } + } + } + + /** + * Remove expired entries from cache. 
+ */ + open suspend fun pruneCache() = coroutineScope { + launch { + schemaCache.values.prune() + } + + launch { + val subjectsIter = subjectVersionCache.values.iterator() + while (subjectsIter.hasNext()) { + val versionMap = subjectsIter.next() + versionMap.values.prune() + if (versionMap.isEmpty()) { + subjectsIter.remove() + } + } + } + } + + /** + * Remove all entries from cache. + */ + open fun clearCache() { + subjectVersionCache.clear() + schemaCache.clear() + } + + @RadarProducerDsl + class Config( + val baseUrl: String, + ) { + var httpClient: HttpClient? = null + var schemaTimeout: CacheConfig = DEFAULT_SCHEMA_TIMEOUT_CONFIG + var ioContext: CoroutineContext = Dispatchers.IO + fun httpClient(config: HttpClientConfig<*>.() -> Unit) { + httpClient = httpClient?.config(config) + ?: HttpClient(CIO) + } + + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (javaClass != other?.javaClass) return false + + other as Config + + return baseUrl == other.baseUrl && + httpClient == other.httpClient && + schemaTimeout == other.schemaTimeout && + ioContext == other.ioContext + } + + override fun hashCode(): Int = hash(baseUrl, httpClient, schemaTimeout, ioContext) + } + + fun config(config: Config.() -> Unit): SchemaRetriever { + val currentConfig = toConfig() + val newConfig = toConfig().apply(config) + return if (currentConfig != newConfig) { + SchemaRetriever(newConfig) + } else { + this + } + } + + private fun toConfig(): Config = Config(baseUrl = baseUrl).apply { + httpClient = this@SchemaRetriever.httpClient + schemaTimeout = this@SchemaRetriever.schemaTimeout + ioContext = this@SchemaRetriever.ioContext + } + + companion object { + private val DEFAULT_SCHEMA_TIMEOUT_CONFIG = CacheConfig( + refreshDuration = 1.days, + retryDuration = 1.minutes, + ) + + fun schemaRetriever(baseUrl: String, config: Config.() -> Unit): SchemaRetriever { + return SchemaRetriever(Config(baseUrl).apply(config)) + } + + /** The subject in the Avro Schema 
Registry, given a Kafka topic. */ + @JvmStatic + fun subject(topic: String, ofValue: Boolean): String = if (ofValue) "$topic-value" else "$topic-key" + + private fun MutableMap.computeIfAbsent( + key: K, + newValueGenerator: () -> V, + ): V { + return get(key) + ?: run { + val newValue = newValueGenerator() + putIfAbsent(key, newValue) + ?: newValue + } + } + } +} diff --git a/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.java b/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.java deleted file mode 100644 index 01d515f7..00000000 --- a/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.topic; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Type; -import org.apache.avro.specific.SpecificData; -import org.apache.avro.specific.SpecificRecord; - -/** Kafka topic with schema. */ -public class AvroTopic extends KafkaTopic { - private final Schema valueSchema; - private final Schema keySchema; - private final Schema.Type[] valueFieldTypes; - private final Class valueClass; - private final Class keyClass; - - /** - * Kafka topic with Avro schema. 
- * @param name topic name - * @param keySchema Avro schema for keys - * @param valueSchema Avro schema for values - * @param keyClass Java class for keys - * @param valueClass Java class for values - */ - public AvroTopic(String name, - Schema keySchema, Schema valueSchema, - Class keyClass, Class valueClass) { - super(name); - - if (keySchema == null || valueSchema == null || keyClass == null || valueClass == null) { - throw new IllegalArgumentException("Topic values may not be null"); - } - - this.keySchema = keySchema; - this.valueSchema = valueSchema; - this.valueClass = valueClass; - this.keyClass = keyClass; - - if (valueSchema.getType() == Type.RECORD) { - List fields = valueSchema.getFields(); - this.valueFieldTypes = new Schema.Type[fields.size()]; - for (int i = 0; i < fields.size(); i++) { - valueFieldTypes[i] = fields.get(i).schema().getType(); - } - } else { - this.valueFieldTypes = null; - } - } - - /** Avro schema used for keys. */ - public Schema getKeySchema() { - return keySchema; - } - - /** Avro schema used for values. */ - public Schema getValueSchema() { - return valueSchema; - } - - /** Java class used for keys. */ - public Class getKeyClass() { - return keyClass; - } - - /** Java class used for values. */ - public Class getValueClass() { - return valueClass; - } - - /** - * Tries to construct a new SpecificData instance of the value. - * @return new empty SpecificData class - * @throws ClassCastException Value class is not a SpecificData class - */ - @SuppressWarnings("unchecked") - public V newValueInstance() throws ClassCastException { - return (V)SpecificData.newInstance(valueClass, valueSchema); - } - - public Schema.Type[] getValueFieldTypes() { - return Arrays.copyOf(valueFieldTypes, valueFieldTypes.length); - } - - /** - * Parse an AvroTopic. 
- * - * @throws IllegalArgumentException if the key_schema or value_schema properties are not valid - * Avro SpecificRecord classes - */ - @SuppressWarnings({"unchecked"}) - public static AvroTopic parse( - String topic, String keySchema, String valueSchema) { - Objects.requireNonNull(topic, "topic needs to be specified"); - K key = parseSpecificRecord(keySchema); - V value = parseSpecificRecord(valueSchema); - return new AvroTopic<>(topic, - key.getSchema(), value.getSchema(), - (Class) key.getClass(), (Class) value.getClass()); - } - - /** - * Parse the schema of a single specific record. - * - * @param schemaClass class name of the SpecificRecord to use - * @param class type to return - * @return Instantiated class of given specific record class - */ - @SuppressWarnings("unchecked") - public static K parseSpecificRecord(String schemaClass) { - try { - Objects.requireNonNull(schemaClass, "schema needs to be specified"); - - Class keyClass = (Class) Class.forName(schemaClass); - Schema keyAvroSchema = (Schema) keyClass - .getMethod("getClassSchema").invoke(null); - // check instantiation - return (K) SpecificData.newInstance(keyClass, keyAvroSchema); - } catch (ClassCastException | ReflectiveOperationException ex) { - throw new IllegalArgumentException("Schema " + schemaClass + " cannot be instantiated", - ex); - } - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (!super.equals(o)) { - return false; - } - - AvroTopic topic = (AvroTopic) o; - - return keyClass == topic.getKeyClass() && valueClass == topic.getValueClass(); - } - - @Override - public int hashCode() { - return Objects.hash(getName(), keyClass, valueClass); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.kt b/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.kt new file mode 100644 index 00000000..b678fc8e --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/topic/AvroTopic.kt @@ -0,0 +1,123 @@ +/* + * 
Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.topic + +import org.apache.avro.Schema +import org.apache.avro.specific.SpecificData +import org.apache.avro.specific.SpecificRecord +import java.util.* + +/** + * Kafka topic with Avro schema. + * @param name topic name + * @param keySchema Avro schema for keys + * @param valueSchema Avro schema for values + * @param keyClass Java class for keys + * @param valueClass Java class for values + */ +open class AvroTopic( + name: String, + val keySchema: Schema, + val valueSchema: Schema, + val keyClass: Class, + val valueClass: Class, +) : KafkaTopic(name) { + val valueFieldTypes: Array? = if (valueSchema.type == Schema.Type.RECORD) { + val fields = valueSchema.fields + Array(fields.size) { i -> + fields[i].schema().type + } + } else { + null + } + get() = field?.copyOf() + + /** + * Tries to construct a new SpecificData instance of the value. 
+ * @return new empty SpecificData class + * @throws ClassCastException Value class is not a SpecificData class + */ + @Suppress("UNCHECKED_CAST") + @Throws(ClassCastException::class) + fun newValueInstance(): V { + return SpecificData.newInstance(valueClass, valueSchema) as V + } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + if (!super.equals(other)) { + return false + } + other as AvroTopic<*, *> + return keyClass == other.keyClass && valueClass == other.valueClass + } + + override fun hashCode(): Int = name.hashCode() + + companion object { + /** + * Parse an AvroTopic. + * + * @throws IllegalArgumentException if the key_schema or value_schema properties are not valid + * Avro SpecificRecord classes + */ + fun parse( + topic: String, + keySchema: String, + valueSchema: String, + ): AvroTopic { + val key = parseSpecificRecord(keySchema) + val value = parseSpecificRecord(valueSchema) + return AvroTopic( + topic, + key.schema, + value.schema, + key.javaClass, + value.javaClass, + ) + } + + /** + * Parse the schema of a single specific record. 
+ * + * @param schemaClass class name of the SpecificRecord to use + * @param class type to return + * @return Instantiated class of given specific record class + */ + @Suppress("UNCHECKED_CAST") + fun parseSpecificRecord(schemaClass: String): K { + return try { + val keyClass = Class.forName(schemaClass) + val keyAvroSchema = keyClass + .getMethod("getClassSchema").invoke(null) as Schema + // check instantiation + SpecificData.newInstance(keyClass, keyAvroSchema) as K + } catch (ex: ClassCastException) { + throw IllegalArgumentException( + "Schema $schemaClass cannot be instantiated", + ex, + ) + } catch (ex: ReflectiveOperationException) { + throw IllegalArgumentException( + "Schema $schemaClass cannot be instantiated", + ex, + ) + } + } + } +} diff --git a/radar-commons/src/main/java/org/radarbase/topic/KafkaTopic.java b/radar-commons/src/main/java/org/radarbase/topic/KafkaTopic.java deleted file mode 100644 index 61b40e26..00000000 --- a/radar-commons/src/main/java/org/radarbase/topic/KafkaTopic.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.topic; - -import java.util.regex.Pattern; - -/** - * A topic that used by Apache Kafka. 
- */ -public class KafkaTopic implements Comparable { - private final String name; - private static final Pattern TOPIC_NAME_PATTERN = Pattern.compile("[a-zA-Z][a-zA-Z0-9_]*"); - - /** - * Kafka topic with given name. - * @param name topic name inside the Kafka cluster - * @throws IllegalArgumentException if the topic name is null or is not ASCII-alphanumeric with - * possible underscores. - */ - public KafkaTopic(String name) { - if (name == null) { - throw new IllegalArgumentException("Kafka topic name may not be null"); - } - if (!TOPIC_NAME_PATTERN.matcher(name).matches()) { - throw new IllegalArgumentException("Kafka topic " + name + " is not ASCII-alphanumeric " - + "with possible underscores."); - } - this.name = name; - } - - /** - * Get the topic name. - * @return topic name - */ - public String getName() { - return this.name; - } - - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - KafkaTopic topic = (KafkaTopic) o; - - return name.equals(topic.name); - } - - @Override - public int hashCode() { - return name.hashCode(); - } - - @Override - public String toString() { - return getClass().getSimpleName() + "<" + name + ">"; - } - - @Override - public int compareTo(KafkaTopic o) { - return name.compareTo(o.name); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/topic/KafkaTopic.kt b/radar-commons/src/main/java/org/radarbase/topic/KafkaTopic.kt new file mode 100644 index 00000000..3c1dc449 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/topic/KafkaTopic.kt @@ -0,0 +1,57 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.topic + +/** + * A topic that used by Apache Kafka. + * + * @param name topic name inside the Kafka cluster + * @throws IllegalArgumentException if the topic name is null or is not ASCII-alphanumeric with + * possible underscores. +*/ +open class KafkaTopic( + val name: String, +) : Comparable { + init { + require(name.matches(TOPIC_NAME_PATTERN)) { + ( + "Kafka topic " + name + " is not ASCII-alphanumeric " + + "with possible underscores." + ) + } + } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + if (other == null || javaClass != other.javaClass) { + return false + } + other as KafkaTopic + return name == other.name + } + + override fun hashCode(): Int = name.hashCode() + + override fun toString(): String = javaClass.simpleName + "<" + name + ">" + + override fun compareTo(other: KafkaTopic): Int = name.compareTo(other.name) + + companion object { + private val TOPIC_NAME_PATTERN = "[a-zA-Z][a-zA-Z0-9_]*".toRegex() + } +} diff --git a/radar-commons/src/main/java/org/radarbase/topic/SensorTopic.java b/radar-commons/src/main/java/org/radarbase/topic/SensorTopic.java deleted file mode 100644 index b675dd19..00000000 --- a/radar-commons/src/main/java/org/radarbase/topic/SensorTopic.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.topic; - -import org.apache.avro.Schema; -import org.apache.avro.Schema.Type; -import org.apache.avro.specific.SpecificRecord; - -/** - * AvroTopic used by sensors. This has additional verification on the schemas that are used compared - * to AvroTopic. - */ -@SuppressWarnings("PMD.UseUtilityClass") -public class SensorTopic extends AvroTopic { - /** - * AvroTopic that does additional validation on the keys and values. - * @param name topic name - * @param keySchema key schema - * @param valueSchema value schema - * @param keyClass actual key class - * @param valueClass actual value class - */ - public SensorTopic(String name, Schema keySchema, Schema valueSchema, - Class keyClass, Class valueClass) { - super(name, keySchema, valueSchema, keyClass, valueClass); - - if (keySchema.getType() != Type.RECORD) { - throw new IllegalArgumentException("Sensors must send records as keys"); - } - if (valueSchema.getType() != Type.RECORD) { - throw new IllegalArgumentException("Sensors must send records as values"); - } - - if (keySchema.getField("projectId") == null) { - throw new IllegalArgumentException("Key schema must have a project ID"); - } - if (keySchema.getField("userId") == null) { - throw new IllegalArgumentException("Key schema must have a user ID"); - } - if (keySchema.getField("sourceId") == null) { - throw new IllegalArgumentException("Key schema must have a source ID"); - } - if (valueSchema.getField("time") == null) { - throw new IllegalArgumentException("Schema must have time as its first field"); - } - if 
(valueSchema.getField("timeReceived") == null) { - throw new IllegalArgumentException("Schema must have timeReceived as a field"); - } - } - - /** - * Parse a SensorTopic. - * - * @throws IllegalArgumentException if the key_schema or value_schema properties are not valid - * Avro SpecificRecord classes - */ - public static SensorTopic parse( - String topic, String keySchema, String valueSchema) { - AvroTopic parseAvro = AvroTopic.parse(topic, keySchema, valueSchema); - return new SensorTopic<>(parseAvro.getName(), - parseAvro.getKeySchema(), parseAvro.getValueSchema(), - parseAvro.getKeyClass(), parseAvro.getValueClass()); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/topic/SensorTopic.kt b/radar-commons/src/main/java/org/radarbase/topic/SensorTopic.kt new file mode 100644 index 00000000..0fcdd0f7 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/topic/SensorTopic.kt @@ -0,0 +1,72 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.topic + +import org.apache.avro.Schema +import org.apache.avro.specific.SpecificRecord + +/** + * AvroTopic used by sensors. This has additional verification on the schemas that are used compared + * to AvroTopic. 
+ * + * @param name topic name + * @param keySchema key schema + * @param valueSchema value schema + * @param keyClass actual key class + * @param valueClass actual value class +*/ +class SensorTopic( + name: String, + keySchema: Schema, + valueSchema: Schema, + keyClass: Class, + valueClass: Class, +) : AvroTopic(name, keySchema, valueSchema, keyClass, valueClass) { + + init { + require(keySchema.type == Schema.Type.RECORD) { "Sensors must send records as keys" } + require(valueSchema.type == Schema.Type.RECORD) { "Sensors must send records as values" } + requireNotNull(keySchema.getField("projectId")) { "Key schema must have a project ID" } + requireNotNull(keySchema.getField("userId")) { "Key schema must have a user ID" } + requireNotNull(keySchema.getField("sourceId")) { "Key schema must have a source ID" } + requireNotNull(valueSchema.getField("time")) { "Schema must have time as its first field" } + requireNotNull(valueSchema.getField("timeReceived")) { "Schema must have timeReceived as a field" } + } + + companion object { + /** + * Parse a SensorTopic. + * + * @throws IllegalArgumentException if the key_schema or value_schema properties are not valid + * Avro SpecificRecord classes + */ + @JvmStatic + inline fun parse( + topic: String, + keySchema: String, + valueSchema: String, + ): SensorTopic { + val parseAvro = AvroTopic.parse(topic, keySchema, valueSchema) + return SensorTopic( + parseAvro.name, + parseAvro.keySchema, + parseAvro.valueSchema, + parseAvro.keyClass, + parseAvro.valueClass, + ) + } + } +} diff --git a/radar-commons/src/main/java/org/radarbase/util/Base64.java b/radar-commons/src/main/java/org/radarbase/util/Base64.java deleted file mode 100644 index 3f23e4a3..00000000 --- a/radar-commons/src/main/java/org/radarbase/util/Base64.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. - * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 
- * - * This code is free software; you can redistribute it and/or modify it - * under the terms of the GNU General Public License version 2 only, as - * published by the Free Software Foundation. Oracle designates this - * particular file as subject to the "Classpath" exception as provided - * by Oracle in the LICENSE file that accompanied this code. - * - * This code is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License - * version 2 for more details (a copy is included in the LICENSE file that - * accompanied this code). - * - * You should have received a copy of the GNU General Public License version - * 2 along with this work; if not, write to the Free Software Foundation, - * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. - * - * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA - * or visit www.oracle.com if you need additional information or have any - * questions. - */ - -package org.radarbase.util; - -import static java.nio.charset.StandardCharsets.UTF_8; // Since Android API 19 - -/** - * This class consists exclusively of static methods for obtaining - * encoders and decoders for the Base64 encoding scheme. The - * implementation of this class supports the following types of Base64 - * as specified in - * RFC 4648 and - * RFC 2045. - * - *

Uses "The Base64 Alphabet" as specified in Table 1 of - * RFC 4648 and RFC 2045 for encoding and decoding operation. - * The encoder does not add any line feed (line separator) - * character. The decoder rejects data that contains characters - * outside the base64 alphabet.

- * - *

Unless otherwise noted, passing a {@code null} argument to a - * method of this class will cause a {@link java.lang.NullPointerException - * NullPointerException} to be thrown. - * - * @author Xueming Shen - * @since 1.8 - */ - -@SuppressWarnings("PMD.ClassNamingConventions") -public class Base64 { - - private Base64() { - } - - /** - * Returns a {@link Encoder} that encodes using the - * Basic type base64 encoding scheme. - * - * @return A Base64 encoder. - */ - public static Encoder getEncoder() { - return Encoder.RFC4648; - } - - /** - * This class implements an encoder for encoding byte data using - * the Base64 encoding scheme as specified in RFC 4648 and RFC 2045. - * - *

Instances of {@link Encoder} class are safe for use by - * multiple concurrent threads. - * - *

Unless otherwise noted, passing a {@code null} argument to - * a method of this class will cause a - * {@link java.lang.NullPointerException NullPointerException} to - * be thrown. - * - * @since 1.8 - */ - public static class Encoder { - /** - * This array is a lookup table that translates 6-bit positive integer - * index values into their "Base64 Alphabet" equivalents as specified - * in "Table 1: The Base64 Alphabet" of RFC 2045 (and RFC 4648). - */ - private static final byte[] BASE_64_CHAR = { - 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', - 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', - 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', - 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', - '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/' - }; - - static final Encoder RFC4648 = new Encoder(); - - private Encoder() { - } - - /** - * Encodes all bytes from the specified byte array into a newly-allocated - * byte array using the {@link Base64} encoding scheme. The returned byte - * array is of the length of the resulting bytes. - * - * @param src - * the byte array to encode - * @return A newly-allocated byte array containing the resulting - * encoded bytes. 
- */ - public String encode(byte[] src) { - int srcLen = src.length; - byte[] dst = new byte[4 * ((srcLen + 2) / 3)]; - int fullDataLen = srcLen / 3 * 3; - int dstP = 0; - int srcP = 0; - for (; srcP < fullDataLen; srcP += 3) { - int bits = (src[srcP] & 0xff) << 16 - | (src[srcP + 1] & 0xff) << 8 - | (src[srcP + 2] & 0xff); - dst[dstP++] = BASE_64_CHAR[(bits >>> 18) & 0x3f]; - dst[dstP++] = BASE_64_CHAR[(bits >>> 12) & 0x3f]; - dst[dstP++] = BASE_64_CHAR[(bits >>> 6) & 0x3f]; - dst[dstP++] = BASE_64_CHAR[bits & 0x3f]; - } - if (srcP < srcLen) { // 1 or 2 leftover bytes - int b0 = src[srcP++] & 0xff; - dst[dstP++] = BASE_64_CHAR[b0 >> 2]; - if (srcP == srcLen) { - dst[dstP++] = BASE_64_CHAR[(b0 << 4) & 0x3f]; - dst[dstP++] = '='; - } else { - int b1 = src[srcP] & 0xff; - dst[dstP++] = BASE_64_CHAR[(b0 << 4) & 0x3f | (b1 >> 4)]; - dst[dstP++] = BASE_64_CHAR[(b1 << 2) & 0x3f]; - } - dst[dstP] = '='; - } - - return new String(dst, UTF_8); - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/util/Base64Encoder.kt b/radar-commons/src/main/java/org/radarbase/util/Base64Encoder.kt new file mode 100644 index 00000000..44292d6b --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/util/Base64Encoder.kt @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. + * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. + * + * This code is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 only, as + * published by the Free Software Foundation. Oracle designates this + * particular file as subject to the "Classpath" exception as provided + * by Oracle in the LICENSE file that accompanied this code. + * + * This code is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License + * version 2 for more details (a copy is included in the LICENSE file that + * accompanied this code). + * + * You should have received a copy of the GNU General Public License version + * 2 along with this work; if not, write to the Free Software Foundation, + * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. + * + * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA + * or visit www.oracle.com if you need additional information or have any + * questions. + */ +package org.radarbase.util + +// Since Android API 19 +/** + * This class consists exclusively of static methods for obtaining + * encoders and decoders for the Base64 encoding scheme. The + * implementation of this class supports the following types of Base64 + * as specified in + * [RFC 4648](http://www.ietf.org/rfc/rfc4648.txt) and + * [RFC 2045](http://www.ietf.org/rfc/rfc2045.txt). + * + * + * Uses "The Base64 Alphabet" as specified in Table 1 of + * RFC 4648 and RFC 2045 for encoding and decoding operation. + * The encoder does not add any line feed (line separator) + * character. The decoder rejects data that contains characters + * outside the base64 alphabet. + * + * + * Unless otherwise noted, passing a `null` argument to a + * method of this class will cause a [ NullPointerException][java.lang.NullPointerException] to be thrown. + * + * Note: needed because it is only included in Android API level 26. + * + * @author Xueming Shen + * @since 1.8 + */ +object Base64Encoder { + /** + * This class implements an encoder for encoding byte data using + * the Base64 encoding scheme as specified in RFC 4648 and RFC 2045. + * + * + * Instances of [Encoder] class are safe for use by + * multiple concurrent threads. + * + * + * Unless otherwise noted, passing a `null` argument to + * a method of this class will cause a + * [NullPointerException][java.lang.NullPointerException] to + * be thrown. 
+ * + * Encodes all bytes from the specified byte array into a newly-allocated + * byte array using the [Base64] encoding scheme. The returned byte + * array is of the length of the resulting bytes. + * + * @param src + * the byte array to encode + * @return A newly-allocated byte array containing the resulting + * encoded bytes. + */ + fun encode(src: ByteArray): String { + val srcLen = src.size + val dst = ByteArray(4 * ((srcLen + 2) / 3)) + val fullDataLen = srcLen / 3 * 3 + var dstP = 0 + var srcP = 0 + while (srcP < fullDataLen) { + val bits = (src[srcP].toInt() and 0xff).shl(16) or + (src[srcP + 1].toInt() and 0xff).shl(8) or + (src[srcP + 2].toInt() and 0xff) + dst[dstP++] = BASE_64_CHAR[bits.ushr(18) and 0x3f] + dst[dstP++] = BASE_64_CHAR[bits.ushr(12) and 0x3f] + dst[dstP++] = BASE_64_CHAR[bits.ushr(6) and 0x3f] + dst[dstP++] = BASE_64_CHAR[bits and 0x3f] + srcP += 3 + } + if (srcP < srcLen) { // 1 or 2 leftover bytes + val b0 = src[srcP++].toInt() and 0xff + dst[dstP++] = BASE_64_CHAR[b0 shr 2] + if (srcP == srcLen) { + dst[dstP++] = BASE_64_CHAR[b0 shl 4 and 0x3f] + dst[dstP++] = '='.code.toByte() + } else { + val b1 = src[srcP].toInt() and 0xff + dst[dstP++] = BASE_64_CHAR[b0 shl 4 and 0x3f or (b1 shr 4)] + dst[dstP++] = BASE_64_CHAR[b1 shl 2 and 0x3f] + } + dst[dstP] = '='.code.toByte() + } + return String(dst) + } + + /** + * This array is a lookup table that translates 6-bit positive integer + * index values into their "Base64 Alphabet" equivalents as specified + * in "Table 1: The Base64 Alphabet" of RFC 2045 (and RFC 4648). 
+ */ + private val BASE_64_CHAR = byteArrayOf( + 'A'.code.toByte(), + 'B'.code.toByte(), + 'C'.code.toByte(), + 'D'.code.toByte(), + 'E'.code.toByte(), + 'F'.code.toByte(), + 'G'.code.toByte(), + 'H'.code.toByte(), + 'I'.code.toByte(), + 'J'.code.toByte(), + 'K'.code.toByte(), + 'L'.code.toByte(), + 'M'.code.toByte(), + 'N'.code.toByte(), + 'O'.code.toByte(), + 'P'.code.toByte(), + 'Q'.code.toByte(), + 'R'.code.toByte(), + 'S'.code.toByte(), + 'T'.code.toByte(), + 'U'.code.toByte(), + 'V'.code.toByte(), + 'W'.code.toByte(), + 'X'.code.toByte(), + 'Y'.code.toByte(), + 'Z'.code.toByte(), + 'a'.code.toByte(), + 'b'.code.toByte(), + 'c'.code.toByte(), + 'd'.code.toByte(), + 'e'.code.toByte(), + 'f'.code.toByte(), + 'g'.code.toByte(), + 'h'.code.toByte(), + 'i'.code.toByte(), + 'j'.code.toByte(), + 'k'.code.toByte(), + 'l'.code.toByte(), + 'm'.code.toByte(), + 'n'.code.toByte(), + 'o'.code.toByte(), + 'p'.code.toByte(), + 'q'.code.toByte(), + 'r'.code.toByte(), + 's'.code.toByte(), + 't'.code.toByte(), + 'u'.code.toByte(), + 'v'.code.toByte(), + 'w'.code.toByte(), + 'x'.code.toByte(), + 'y'.code.toByte(), + 'z'.code.toByte(), + '0'.code.toByte(), + '1'.code.toByte(), + '2'.code.toByte(), + '3'.code.toByte(), + '4'.code.toByte(), + '5'.code.toByte(), + '6'.code.toByte(), + '7'.code.toByte(), + '8'.code.toByte(), + '9'.code.toByte(), + '+'.code.toByte(), + '/'.code.toByte(), + ) +} diff --git a/radar-commons/src/main/java/org/radarbase/util/RadarProducerDsl.kt b/radar-commons/src/main/java/org/radarbase/util/RadarProducerDsl.kt new file mode 100644 index 00000000..6c88aa2b --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/util/RadarProducerDsl.kt @@ -0,0 +1,13 @@ +package org.radarbase.util + +/** + * A marker annotations for DSLs. 
+ */ +@DslMarker +@Target( + AnnotationTarget.CLASS, + AnnotationTarget.TYPEALIAS, + AnnotationTarget.TYPE, + AnnotationTarget.FUNCTION, +) +annotation class RadarProducerDsl diff --git a/radar-commons/src/main/java/org/radarbase/util/RestUtils.java b/radar-commons/src/main/java/org/radarbase/util/RestUtils.java deleted file mode 100644 index 6b90181a..00000000 --- a/radar-commons/src/main/java/org/radarbase/util/RestUtils.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2018 The Hyve - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.util; - -import java.security.GeneralSecurityException; -import java.security.KeyManagementException; -import java.security.KeyStore; -import java.security.NoSuchAlgorithmException; -import java.util.Arrays; -import javax.net.ssl.HostnameVerifier; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLSocketFactory; -import javax.net.ssl.TrustManager; -import javax.net.ssl.TrustManagerFactory; -import javax.net.ssl.X509TrustManager; -import okhttp3.internal.platform.Platform; -import okhttp3.internal.tls.OkHostnameVerifier; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** Utility methods and variables for OkHttp initialization. */ -public final class RestUtils { - private static final Logger logger = LoggerFactory.getLogger(RestUtils.class); - - /** OkHttp3 default hostname verifier. 
*/ - public static final HostnameVerifier DEFAULT_HOSTNAME_VERIFIER = OkHostnameVerifier.INSTANCE; - /** OkHttp3 hostname verifier for unsafe connections. */ - public static final HostnameVerifier UNSAFE_HOSTNAME_VERIFIER = (hostname, session) -> true; - - /** Unsafe OkHttp3 trust manager that trusts all certificates. */ - public static final TrustManager[] UNSAFE_TRUST_MANAGER = { - new X509TrustManager() { - @Override - public void checkClientTrusted(java.security.cert.X509Certificate[] chain, - String authType) { - //Nothing to do - } - - @Override - public void checkServerTrusted(java.security.cert.X509Certificate[] chain, - String authType) { - //Nothing to do - } - - @Override - public java.security.cert.X509Certificate[] getAcceptedIssuers() { - return new java.security.cert.X509Certificate[]{}; - } - } - }; - - /** Unsafe OkHttp3 SSLSocketFactory that trusts all certificates. */ - public static final SSLSocketFactory UNSAFE_SSL_FACTORY; - - static { - SSLSocketFactory factory; - try { - final SSLContext sslContext = SSLContext.getInstance("SSL"); - sslContext.init(null, UNSAFE_TRUST_MANAGER, new java.security.SecureRandom()); - - factory = sslContext.getSocketFactory(); - } catch (NoSuchAlgorithmException | KeyManagementException e) { - logger.error("Failed to initialize unsafe SSL factory", e); - factory = null; - } - UNSAFE_SSL_FACTORY = factory; - } - - - private RestUtils() { - // utility class - } - - /** - * Default OkHttp3 trust manager that trusts all certificates. - * Copied from private method in OkHttpClient. 
- */ - public static X509TrustManager systemDefaultTrustManager() { - try { - TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance( - TrustManagerFactory.getDefaultAlgorithm()); - trustManagerFactory.init((KeyStore) null); - TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); - if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) { - throw new IllegalStateException("Unexpected default trust managers:" - + Arrays.toString(trustManagers)); - } - return (X509TrustManager) trustManagers[0]; - } catch (GeneralSecurityException e) { - throw new IllegalStateException("No System TLS", e); - } - } - - /** - * Default OkHttp3 SSLSocketFactory that trusts all certificates. - * Copied from private method in OkHttpClient. - */ - public static SSLSocketFactory systemDefaultSslSocketFactory(X509TrustManager trustManager) { - try { - SSLContext sslContext = Platform.get().newSSLContext(); - sslContext.init(null, new TrustManager[] { trustManager }, null); - return sslContext.getSocketFactory(); - } catch (GeneralSecurityException e) { - throw new IllegalStateException("No System TLS", e); - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/util/Serialization.java b/radar-commons/src/main/java/org/radarbase/util/Serialization.java deleted file mode 100644 index 2726f21f..00000000 --- a/radar-commons/src/main/java/org/radarbase/util/Serialization.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.util; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -/** Serialization utility class. */ -@SuppressWarnings("PMD.ClassNamingConventions") -public final class Serialization { - - private Serialization() { - // utility class - } - - /** Read a little-endian encoded long from given bytes, starting from startIndex. */ - public static long bytesToLong(byte[] b, int startIndex) { - long result = 0; - for (int i = 0; i < 8; i++) { - result <<= 8; - result |= b[i + startIndex] & 0xFF; - } - return result; - } - - /** Write a long to given bytes with little-endian encoding, starting from startIndex. */ - public static void longToBytes(long value, byte[] b, int startIndex) { - b[startIndex] = (byte)((value >> 56) & 0xFF); - b[startIndex + 1] = (byte)((value >> 48) & 0xFF); - b[startIndex + 2] = (byte)((value >> 40) & 0xFF); - b[startIndex + 3] = (byte)((value >> 32) & 0xFF); - b[startIndex + 4] = (byte)((value >> 24) & 0xFF); - b[startIndex + 5] = (byte)((value >> 16) & 0xFF); - b[startIndex + 6] = (byte)((value >> 8) & 0xFF); - b[startIndex + 7] = (byte)(value & 0xFF); - } - - /** Write an int to given bytes with little-endian encoding, starting from startIndex. */ - public static void intToBytes(int value, byte[] b, int startIndex) { - b[startIndex] = (byte)((value >> 24) & 0xFF); - b[startIndex + 1] = (byte)((value >> 16) & 0xFF); - b[startIndex + 2] = (byte)((value >> 8) & 0xFF); - b[startIndex + 3] = (byte)(value & 0xFF); - } - - /** Read a little-endian encoded int from given bytes, starting from startIndex. */ - public static int bytesToInt(byte[] b, int startIndex) { - int result = 0; - for (int i = 0; i < 4; i++) { - result <<= 8; - result |= b[i + startIndex] & 0xFF; - } - return result; - } - - /** Read a little-endian encoded short from given bytes, starting from startIndex. 
*/ - public static short bytesToShort(byte[] b, int startIndex) { - short result = 0; - for (int i = 0; i < 2; i++) { - result <<= 8; - result |= b[i + startIndex] & 0xFF; - } - return result; - } - - /** - * Convert a boolean to a byte. - * @return -1 if b is null, 1 if b, and 0 if not b - */ - public static byte booleanToByte(Boolean b) { - if (b == null) { - return -1; - } else if (b.equals(Boolean.TRUE)) { - return 1; - } else { - return 0; - } - } - - /** - * Read a boolean from a byte. - * @return null if b == -1, true if b == 1, false otherwise. - */ - public static Boolean byteToBoolean(byte b) { - if (b == -1) { - return null; - } else if (b == 1) { - return Boolean.TRUE; - } else { - return Boolean.FALSE; - } - } - - /** - * Convert a float to a double using its apparent value. This avoids casting to the double - * value closest to the mathematical value of a float. - */ - public static double floatToDouble(float value) { - return Double.parseDouble(Float.toString(value)); - } - - /** - * Copy a stream using a buffer. - * - * @param buffer non-empty, non-null buffer for the copy operations. - * @param in input stream to read data from - * @param out output stream to write data to - * @throws IOException if the streams cannot be read from or written to. - * @throws IllegalArgumentException if the buffer has size 0 - * @throws NullPointerException if buffer, in or out are null. 
- */ - public static void copyStream(byte[] buffer, InputStream in, OutputStream out) - throws IOException { - if (buffer.length == 0) { - throw new IllegalArgumentException("Cannot copy with empty buffer."); - } - int len = in.read(buffer); - while (len != -1) { - out.write(buffer, 0, len); - len = in.read(buffer); - } - } -} diff --git a/radar-commons/src/main/java/org/radarbase/util/Strings.java b/radar-commons/src/main/java/org/radarbase/util/Strings.java deleted file mode 100644 index e613eb8f..00000000 --- a/radar-commons/src/main/java/org/radarbase/util/Strings.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.util; - -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.util.Collection; -import java.util.Iterator; -import java.util.regex.Pattern; - -/** - * String utilities. - */ -@SuppressWarnings("PMD.ClassNamingConventions") -public final class Strings { - private static final Charset UTF_8 = StandardCharsets.UTF_8; - private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray(); - - private Strings() { - // utility class - } - - /** - * For each string, compiles a pattern that checks if it is contained in another string in a - * case-insensitive way. 
- */ - public static Pattern[] containsPatterns(Collection contains) { - Pattern[] patterns = new Pattern[contains.size()]; - Iterator containsIterator = contains.iterator(); - for (int i = 0; i < patterns.length; i++) { - patterns[i] = containsIgnoreCasePattern(containsIterator.next()); - } - return patterns; - } - - /** - * Compiles a pattern that checks if it is contained in another string in a case-insensitive - * 7way. - */ - public static Pattern containsIgnoreCasePattern(String containsString) { - int flags = Pattern.CASE_INSENSITIVE // case insensitive - | Pattern.LITERAL // do not compile special characters - | Pattern.UNICODE_CASE; // case insensitive even for Unicode (special) characters. - return Pattern.compile(containsString, flags); - } - - /** - * Whether any of the patterns matches given value. - */ - public static boolean findAny(Pattern[] patterns, CharSequence value) { - for (Pattern pattern : patterns) { - if (pattern.matcher(value).find()) { - return true; - } - } - return false; - } - - public static byte[] utf8(String value) { - return value.getBytes(UTF_8); - } - - /** Whether given value is null or empty. */ - public static boolean isNullOrEmpty(String value) { - return value == null || value.isEmpty(); - } - - /** - * Converts given bytes to a hex string. - * @param bytes bytes to read. - * @return String with hex values. 
- */ - public static String bytesToHex(byte[] bytes) { - char[] hexChars = new char[bytes.length * 2]; - for (int i = 0; i < bytes.length; i++) { - int value = bytes[i] & 0xFF; - hexChars[i * 2] = HEX_ARRAY[value >>> 4]; - hexChars[i * 2 + 1] = HEX_ARRAY[value & 0x0F]; - } - return new String(hexChars); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/util/Strings.kt b/radar-commons/src/main/java/org/radarbase/util/Strings.kt new file mode 100644 index 00000000..a03d60b6 --- /dev/null +++ b/radar-commons/src/main/java/org/radarbase/util/Strings.kt @@ -0,0 +1,49 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.util + +/** + * String utilities. + */ +object Strings { + private val HEX_ARRAY = "0123456789ABCDEF".toCharArray() + + /** + * Compiles a pattern that checks if it is contained in another string in a case-insensitive + * way. + */ + fun String.toIgnoreCaseRegex(): Regex = toRegex( + setOf( + RegexOption.IGNORE_CASE, + RegexOption.LITERAL, + ), + ) + + /** + * Converts given bytes to a hex string. + * @param bytes bytes to read. + * @return String with hex values. 
+ */ + fun ByteArray.toHexString(): String { + val hexChars = CharArray(size * 2) + for (i in indices) { + val value = get(i).toInt() and 0xFF + hexChars[i * 2] = HEX_ARRAY[value ushr 4] + hexChars[i * 2 + 1] = HEX_ARRAY[value and 0x0F] + } + return String(hexChars) + } +} diff --git a/radar-commons/src/main/java/org/radarbase/util/TimedInt.java b/radar-commons/src/main/java/org/radarbase/util/TimedInt.java deleted file mode 100644 index 108bbc54..00000000 --- a/radar-commons/src/main/java/org/radarbase/util/TimedInt.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.radarbase.util; - -public class TimedInt implements TimedVariable { - public final int value; - private final long expiry; - - public TimedInt(int value, long validity) { - expiry = System.currentTimeMillis() + validity * 1000L; - this.value = value; - } - - @Override - public boolean isExpired() { - return expiry < System.currentTimeMillis(); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - TimedInt other = (TimedInt)o; - return value == other.value - && expiry == other.expiry; - } - - @Override - public int hashCode() { - return value; - } -} diff --git a/radar-commons/src/main/java/org/radarbase/util/TimedValue.java b/radar-commons/src/main/java/org/radarbase/util/TimedValue.java deleted file mode 100644 index cf186055..00000000 --- a/radar-commons/src/main/java/org/radarbase/util/TimedValue.java +++ /dev/null @@ -1,36 +0,0 @@ -package org.radarbase.util; - -import java.util.Objects; - -public class TimedValue implements TimedVariable { - public final T value; - private final long expiry; - - public TimedValue(T value, long validity) { - expiry = System.currentTimeMillis() + validity * 1000L; - this.value = Objects.requireNonNull(value); - } - - @Override - public boolean isExpired() { - return expiry < System.currentTimeMillis(); - } - - @Override - public boolean equals(Object o) { - if 
(this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - TimedValue other = (TimedValue)o; - return value.equals(other.value) - && expiry == other.expiry; - } - - @Override - public int hashCode() { - return value.hashCode(); - } -} diff --git a/radar-commons/src/main/java/org/radarbase/util/TimedVariable.java b/radar-commons/src/main/java/org/radarbase/util/TimedVariable.java deleted file mode 100644 index c4affa99..00000000 --- a/radar-commons/src/main/java/org/radarbase/util/TimedVariable.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.radarbase.util; - -public interface TimedVariable { - boolean isExpired(); -} diff --git a/radar-commons/src/test/java/org/radarbase/data/AvroDatumDecoderTest.java b/radar-commons/src/test/java/org/radarbase/data/AvroDatumDecoderTest.java index e19ec3e5..11beed4b 100644 --- a/radar-commons/src/test/java/org/radarbase/data/AvroDatumDecoderTest.java +++ b/radar-commons/src/test/java/org/radarbase/data/AvroDatumDecoderTest.java @@ -16,14 +16,14 @@ package org.radarbase.data; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import org.apache.avro.specific.SpecificData; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.radarbase.topic.AvroTopic; import org.radarcns.kafka.ObservationKey; import org.radarcns.passive.empatica.EmpaticaE4BloodVolumePulse; -import org.radarbase.topic.AvroTopic; /** * Created by nivethika on 24-2-17. 
diff --git a/radar-commons/src/test/java/org/radarbase/data/StringEncoderTest.java b/radar-commons/src/test/java/org/radarbase/data/StringEncoderTest.java deleted file mode 100644 index 10cacf6b..00000000 --- a/radar-commons/src/test/java/org/radarbase/data/StringEncoderTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.data; - -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Type; -import org.junit.Test; - -/** - * Created by nivethika on 24-2-17. 
- */ -public class StringEncoderTest { - - @Test - public void encodeString() throws IOException { - StringEncoder encoder = new StringEncoder(); - Schema schema = Schema.create(Type.STRING); - - AvroEncoder.AvroWriter keyEncoder = encoder.writer(schema, String.class); - - - byte[] key = keyEncoder.encode("{\"userId\":\"a\",\"sourceId\":\"b\"}"); - assertTrue( new String(key).contains("userId")); - assertTrue( new String(key).contains("sourceId")); - } - -} diff --git a/radar-commons/src/test/java/org/radarbase/producer/avro/AvroDataMapperFactoryTest.kt b/radar-commons/src/test/java/org/radarbase/producer/avro/AvroDataMapperFactoryTest.kt new file mode 100644 index 00000000..2ad26622 --- /dev/null +++ b/radar-commons/src/test/java/org/radarbase/producer/avro/AvroDataMapperFactoryTest.kt @@ -0,0 +1,233 @@ +package org.radarbase.producer.avro + +import org.apache.avro.Schema +import org.apache.avro.SchemaValidationException +import org.apache.avro.generic.GenericDatumReader +import org.apache.avro.generic.GenericDatumWriter +import org.apache.avro.io.DecoderFactory +import org.apache.avro.io.EncoderFactory +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import org.radarcns.kafka.ObservationKey +import java.io.ByteArrayOutputStream +import java.io.IOException + +class AvroDataMapperFactoryTest { + @Test + @Throws(SchemaValidationException::class, IOException::class) + fun mapRecord() { + val actual = doMap( + MEASUREMENT_KEY_SCHEMA, + ObservationKey.getClassSchema(), + "{\"userId\":\"u\", \"sourceId\": \"s\"}", + ) + assertEquals("{\"projectId\":null,\"userId\":\"u\",\"sourceId\":\"s\"}", actual) + } + + @Test + @Throws(SchemaValidationException::class) + fun mapRecordIncomplete() { + assertThrows { + AvroDataMapperFactory.createMapper( + INCOMPLETE_MEASUREMENT_KEY_SCHEMA, + ObservationKey.getClassSchema(), + null, + ) + } + } + + @Test + @Throws(SchemaValidationException::class, 
IOException::class) + fun mapEnumLarger() { + val actual = doMap(SMALL_ENUM_SCHEMA, LARGE_ENUM_SCHEMA, "{\"e\":\"A\"}") + assertEquals("{\"e\":\"A\"}", actual) + } + + @Test + @Throws(SchemaValidationException::class) + fun mapEnumSmaller() { + assertThrows { + AvroDataMapperFactory.createMapper(LARGE_ENUM_SCHEMA, SMALL_ENUM_SCHEMA, null) + } + } + + @Test + @Throws(SchemaValidationException::class, IOException::class) + fun mapEnumSmallerUnknown() { + val actual = doMap(LARGE_ENUM_SCHEMA, UNKNOWN_ENUM_SCHEMA, "{\"e\":\"C\"}") + assertEquals("{\"e\":\"UNKNOWN\"}", actual) + } + + @Test + @Throws(SchemaValidationException::class, IOException::class) + fun mapEnumSmallerDefault() { + val actual = doMap(LARGE_ENUM_SCHEMA, DEFAULT_ENUM_SCHEMA, "{\"e\":\"C\"}") + assertEquals("{\"e\":\"A\"}", actual) + } + + @Test + @Throws(SchemaValidationException::class, IOException::class) + fun mapAll() { + val actual = doMap( + ALL_TYPES_SCHEMA, + ALL_TYPES_ALT_SCHEMA, + "{" + + "\"e\":\"A\"," + + "\"i\":1," + + "\"l\":2," + + "\"d\":3.0," + + "\"f\":4.0," + + "\"sI\":\"5\"," + + "\"sD\":\"6.5\"," + + "\"sU\":null," + + "\"sUi\":{\"string\":\"7\"}," + + "\"sUe\":null," + + "\"uS\":\"s\"," + + "\"se2\":\"B\"," + + "\"se3\":\"g\"," + + "\"a\":[1,2]," + + "\"m\":{\"a\":9}," + + "\"fS\":\"ab\"," + + "\"bS\":\"ab\"," + + "\"fb\":\"ab\"," + + "\"bf\":\"ab\"," + + "\"bfd\":\"abc\"," + + "\"unmapped\":10}", + ) + assertEquals( + "{" + + "\"e\":\"A\"," + + "\"i\":1," + + "\"l\":2.0," + + "\"d\":3.0," + + "\"f\":4.0," + + "\"sI\":5," + + "\"sD\":6.5," + + "\"sU\":\"\"," + + "\"sUi\":{\"int\":7}," + + "\"sUe\":\"A\"," + + "\"uS\":{\"string\":\"s\"}," + + "\"se2\":\"B\"," + + "\"se3\":\"A\"," + + "\"a\":[1.0,2.0]," + + "\"m\":{\"a\":9.0}," + + "\"fS\":\"YWI=\"," + + "\"bS\":\"YWI=\"," + + "\"fb\":\"ab\"," + + "\"bf\":\"ab\"," + + "\"bfd\":\"aa\"" + + "}", + actual, + ) + } + + @Throws(IOException::class, SchemaValidationException::class) + private fun doMap(from: Schema, to: Schema, value: 
String): String { + val mapper = AvroDataMapperFactory.createMapper(from, to, null) + val reader = GenericDatumReader(from) + val decoder = DecoderFactory.get().jsonDecoder(from, value) + val readValue = reader.read(null, decoder) + val writer = GenericDatumWriter(to) + val out = ByteArrayOutputStream() + val encoder = EncoderFactory.get().jsonEncoder(to, out) + writer.write(mapper.convertAvro(readValue), encoder) + encoder.flush() + return out.toString("utf-8") + } + + companion object { + private val MEASUREMENT_KEY_SCHEMA = Schema.Parser().parse( + "{" + + " \"namespace\": \"org.radarcns.key\"," + + " \"type\": \"record\"," + + " \"name\": \"MeasurementKey\"," + + " \"doc\": \"Measurement key in the RADAR-base project\"," + + " \"fields\": [" + + " {\"name\": \"userId\", \"type\": \"string\", \"doc\": \"user ID\"}," + + " {\"name\": \"sourceId\", \"type\": \"string\", \"doc\": \"device source ID\"}" + + " ]" + + "}", + ) + private val INCOMPLETE_MEASUREMENT_KEY_SCHEMA = Schema.Parser().parse( + "{" + + " \"namespace\": \"org.radarcns.key\"," + + " \"type\": \"record\"," + + " \"name\": \"MeasurementKey\"," + + " \"doc\": \"Measurement key in the RADAR-base project\"," + + " \"fields\": [" + + " {\"name\": \"sourceId\", \"type\": \"string\", \"doc\": \"device source ID\"}" + + " ]" + + "}", + ) + private val SMALL_ENUM_SCHEMA = Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"E\",\"fields\":[" + + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\", \"B\"]}}" + + "]}", + ) + private val LARGE_ENUM_SCHEMA = Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"E\",\"fields\":[" + + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\", \"B\", \"C\"]}}" + + "]}", + ) + private val UNKNOWN_ENUM_SCHEMA = Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"E\",\"fields\":[" + + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\", \"B\", 
\"UNKNOWN\"]}}" + + "]}", + ) + private val DEFAULT_ENUM_SCHEMA = Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"E\",\"fields\":[" + + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\"]}, \"default\": \"A\"}" + + "]}", + ) + private val ALL_TYPES_SCHEMA = Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"R\",\"fields\":[" + + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\"]}, \"default\": \"A\"}," + + "{\"name\": \"i\", \"type\": \"int\"}," + + "{\"name\": \"l\", \"type\": \"long\"}," + + "{\"name\": \"d\", \"type\": \"double\"}," + + "{\"name\": \"f\", \"type\": \"float\"}," + + "{\"name\": \"sI\", \"type\": \"string\"}," + + "{\"name\": \"sD\", \"type\": \"string\"}," + + "{\"name\": \"sU\", \"type\": [\"null\", \"string\"]}," + + "{\"name\": \"sUi\", \"type\": [\"null\", \"string\"]}," + + "{\"name\": \"sUe\", \"type\": [\"null\", {\"name\": \"SE\", \"type\": \"enum\", \"symbols\": [\"A\"]}]}," + + "{\"name\": \"uS\", \"type\": \"string\"}," + + "{\"name\": \"se2\", \"type\": \"string\"}," + + "{\"name\": \"se3\", \"type\": \"string\"}," + + "{\"name\": \"a\", \"type\": {\"type\":\"array\", \"items\": {\"type\": \"int\"}}}," + + "{\"name\": \"m\", \"type\": {\"type\":\"map\", \"values\": {\"type\": \"int\"}}}," + + "{\"name\": \"fS\", \"type\": {\"name\": \"f1\", \"type\":\"fixed\", \"size\": 2}}," + + "{\"name\": \"bS\", \"type\": \"bytes\"}," + + "{\"name\": \"fb\", \"type\": {\"name\": \"f2\",\"type\": \"fixed\", \"size\": 2}}," + + "{\"name\": \"bf\", \"type\": \"bytes\"}," + + "{\"name\": \"bfd\", \"type\": \"bytes\"}," + + "{\"name\": \"unmapped\", \"type\": \"int\"}" + + "]}", + ) + private val ALL_TYPES_ALT_SCHEMA = Schema.Parser().parse( + "{\"type\":\"record\",\"name\":\"R\",\"fields\":[" + + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\", \"B\"]}, \"default\": \"A\"}," + + "{\"name\": \"i\", \"type\": 
\"long\"}," + + "{\"name\": \"l\", \"type\": \"double\"}," + + "{\"name\": \"d\", \"type\": \"float\"}," + + "{\"name\": \"f\", \"type\": \"double\"}," + + "{\"name\": \"sI\", \"type\": \"int\", \"default\": 0}," + + "{\"name\": \"sD\", \"type\": \"double\", \"default\": 0.0}," + + "{\"name\": \"sU\", \"type\": \"string\", \"default\": \"\"}," + + "{\"name\": \"sUi\", \"type\": [\"null\", \"int\"], \"default\":null}," + + "{\"name\": \"sUe\", \"type\": {\"name\": \"SE\", \"type\": \"enum\", \"symbols\": [\"A\"]}, \"default\": \"A\"}," + + "{\"name\": \"uS\", \"type\": [\"null\", \"string\"]}," + + "{\"name\": \"se2\", \"type\": {\"name\": \"SE2\", \"type\": \"enum\", \"symbols\": [\"A\", \"B\"]}, \"default\": \"A\"}," + + "{\"name\": \"se3\", \"type\": {\"name\": \"SE3\", \"type\": \"enum\", \"symbols\": [\"A\", \"B\"]}, \"default\": \"A\"}," + + "{\"name\": \"a\", \"type\": {\"type\":\"array\", \"items\": {\"type\": \"float\"}}}," + + "{\"name\": \"m\", \"type\": {\"type\":\"map\", \"values\": {\"type\": \"float\"}}}," + + "{\"name\": \"fS\", \"type\": \"string\"}," + + "{\"name\": \"bS\", \"type\": \"string\"}," + + "{\"name\": \"fb\", \"type\": \"bytes\"}," + + "{\"name\": \"bf\", \"type\": {\"name\": \"f3\",\"type\":\"fixed\", \"size\": 2}, \"default\": \"aa\"}," + + "{\"name\": \"bfd\", \"type\": {\"name\": \"f4\",\"type\":\"fixed\", \"size\": 2}, \"default\": \"aa\"}" + + "]}", + ) + } +} diff --git a/radar-commons/src/test/java/org/radarbase/producer/rest/AvroDataMapperFactoryTest.java b/radar-commons/src/test/java/org/radarbase/producer/rest/AvroDataMapperFactoryTest.java deleted file mode 100644 index 8d11e655..00000000 --- a/radar-commons/src/test/java/org/radarbase/producer/rest/AvroDataMapperFactoryTest.java +++ /dev/null @@ -1,215 +0,0 @@ -package org.radarbase.producer.rest; - -import static org.junit.Assert.assertEquals; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import org.apache.avro.Schema; -import 
org.apache.avro.SchemaValidationException; -import org.apache.avro.generic.GenericDatumReader; -import org.apache.avro.generic.GenericDatumWriter; -import org.apache.avro.io.DecoderFactory; -import org.apache.avro.io.EncoderFactory; -import org.apache.avro.io.JsonDecoder; -import org.apache.avro.io.JsonEncoder; -import org.junit.Before; -import org.junit.Test; -import org.radarcns.kafka.ObservationKey; - -public class AvroDataMapperFactoryTest { - private static final Schema MEASUREMENT_KEY_SCHEMA = new Schema.Parser().parse("{"+ - " \"namespace\": \"org.radarcns.key\","+ - " \"type\": \"record\","+ - " \"name\": \"MeasurementKey\","+ - " \"doc\": \"Measurement key in the RADAR-base project\","+ - " \"fields\": ["+ - " {\"name\": \"userId\", \"type\": \"string\", \"doc\": \"user ID\"},"+ - " {\"name\": \"sourceId\", \"type\": \"string\", \"doc\": \"device source ID\"}"+ - " ]"+ - "}"); - - private static final Schema INCOMPLETE_MEASUREMENT_KEY_SCHEMA = new Schema.Parser().parse("{"+ - " \"namespace\": \"org.radarcns.key\","+ - " \"type\": \"record\","+ - " \"name\": \"MeasurementKey\","+ - " \"doc\": \"Measurement key in the RADAR-base project\","+ - " \"fields\": ["+ - " {\"name\": \"sourceId\", \"type\": \"string\", \"doc\": \"device source ID\"}"+ - " ]"+ - "}"); - - private static final Schema SMALL_ENUM_SCHEMA = new Schema.Parser().parse( - "{\"type\":\"record\",\"name\":\"E\",\"fields\":[" - + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\", \"B\"]}}" - + "]}"); - - private static final Schema LARGE_ENUM_SCHEMA = new Schema.Parser().parse( - "{\"type\":\"record\",\"name\":\"E\",\"fields\":[" - +"{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\", \"B\", \"C\"]}}" - + "]}"); - - private static final Schema UNKNOWN_ENUM_SCHEMA = new Schema.Parser().parse( - "{\"type\":\"record\",\"name\":\"E\",\"fields\":[" - + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", 
\"symbols\": [\"A\", \"B\", \"UNKNOWN\"]}}" - + "]}"); - - private static final Schema DEFAULT_ENUM_SCHEMA = new Schema.Parser().parse( - "{\"type\":\"record\",\"name\":\"E\",\"fields\":[" - + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\"]}, \"default\": \"A\"}" - + "]}"); - - private static final Schema ALL_TYPES_SCHEMA = new Schema.Parser().parse( - "{\"type\":\"record\",\"name\":\"R\",\"fields\":[" - + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", \"symbols\": [\"A\"]}, \"default\": \"A\"}," - + "{\"name\": \"i\", \"type\": \"int\"}," - + "{\"name\": \"l\", \"type\": \"long\"}," - + "{\"name\": \"d\", \"type\": \"double\"}," - + "{\"name\": \"f\", \"type\": \"float\"}," - + "{\"name\": \"sI\", \"type\": \"string\"}," - + "{\"name\": \"sD\", \"type\": \"string\"}," - + "{\"name\": \"sU\", \"type\": [\"null\", \"string\"]}," - + "{\"name\": \"sUi\", \"type\": [\"null\", \"string\"]}," - + "{\"name\": \"sUe\", \"type\": [\"null\", {\"name\": \"SE\", \"type\": \"enum\", \"symbols\": [\"A\"]}]}," - + "{\"name\": \"uS\", \"type\": \"string\"}," - + "{\"name\": \"se2\", \"type\": \"string\"}," - + "{\"name\": \"se3\", \"type\": \"string\"}," - + "{\"name\": \"a\", \"type\": {\"type\":\"array\", \"items\": {\"type\": \"int\"}}}," - + "{\"name\": \"m\", \"type\": {\"type\":\"map\", \"values\": {\"type\": \"int\"}}}," - + "{\"name\": \"fS\", \"type\": {\"name\": \"f1\", \"type\":\"fixed\", \"size\": 2}}," - + "{\"name\": \"bS\", \"type\": \"bytes\"}," - + "{\"name\": \"fb\", \"type\": {\"name\": \"f2\",\"type\": \"fixed\", \"size\": 2}}," - + "{\"name\": \"bf\", \"type\": \"bytes\"}," - + "{\"name\": \"bfd\", \"type\": \"bytes\"}," - + "{\"name\": \"unmapped\", \"type\": \"int\"}" - + "]}"); - private static final Schema ALL_TYPES_ALT_SCHEMA = new Schema.Parser().parse( - "{\"type\":\"record\",\"name\":\"R\",\"fields\":[" - + "{\"name\": \"e\", \"type\": {\"type\": \"enum\", \"name\": \"Enum\", 
\"symbols\": [\"A\", \"B\"]}, \"default\": \"A\"}," - + "{\"name\": \"i\", \"type\": \"long\"}," - + "{\"name\": \"l\", \"type\": \"double\"}," - + "{\"name\": \"d\", \"type\": \"float\"}," - + "{\"name\": \"f\", \"type\": \"double\"}," - + "{\"name\": \"sI\", \"type\": \"int\", \"default\": 0}," - + "{\"name\": \"sD\", \"type\": \"double\", \"default\": 0.0}," - + "{\"name\": \"sU\", \"type\": \"string\", \"default\": \"\"}," - + "{\"name\": \"sUi\", \"type\": [\"null\", \"int\"], \"default\":null}," - + "{\"name\": \"sUe\", \"type\": {\"name\": \"SE\", \"type\": \"enum\", \"symbols\": [\"A\"]}, \"default\": \"A\"}," - + "{\"name\": \"uS\", \"type\": [\"null\", \"string\"]}," - + "{\"name\": \"se2\", \"type\": {\"name\": \"SE2\", \"type\": \"enum\", \"symbols\": [\"A\", \"B\"]}, \"default\": \"A\"}," - + "{\"name\": \"se3\", \"type\": {\"name\": \"SE3\", \"type\": \"enum\", \"symbols\": [\"A\", \"B\"]}, \"default\": \"A\"}," - + "{\"name\": \"a\", \"type\": {\"type\":\"array\", \"items\": {\"type\": \"float\"}}}," - + "{\"name\": \"m\", \"type\": {\"type\":\"map\", \"values\": {\"type\": \"float\"}}}," - + "{\"name\": \"fS\", \"type\": \"string\"}," - + "{\"name\": \"bS\", \"type\": \"string\"}," - + "{\"name\": \"fb\", \"type\": \"bytes\"}," - + "{\"name\": \"bf\", \"type\": {\"name\": \"f3\",\"type\":\"fixed\", \"size\": 2}, \"default\": \"aa\"}," - + "{\"name\": \"bfd\", \"type\": {\"name\": \"f4\",\"type\":\"fixed\", \"size\": 2}, \"default\": \"aa\"}" - + "]}"); - private AvroDataMapperFactory factory; - - @Before - public void setUp() { - this.factory = new AvroDataMapperFactory(); - } - - @Test - public void mapRecord() throws SchemaValidationException, IOException { - String actual = doMap(MEASUREMENT_KEY_SCHEMA, ObservationKey.getClassSchema(), - "{\"userId\":\"u\", \"sourceId\": \"s\"}"); - assertEquals("{\"projectId\":null,\"userId\":\"u\",\"sourceId\":\"s\"}", actual); - } - - @Test(expected = SchemaValidationException.class) - public void 
mapRecordIncomplete() throws SchemaValidationException { - factory.createMapper(INCOMPLETE_MEASUREMENT_KEY_SCHEMA, ObservationKey.getClassSchema(), null); - } - - @Test - public void mapEnumLarger() throws SchemaValidationException, IOException { - String actual = doMap(SMALL_ENUM_SCHEMA, LARGE_ENUM_SCHEMA, "{\"e\":\"A\"}"); - assertEquals("{\"e\":\"A\"}", actual); - } - - @Test(expected = SchemaValidationException.class) - public void mapEnumSmaller() throws SchemaValidationException { - factory.createMapper(LARGE_ENUM_SCHEMA, SMALL_ENUM_SCHEMA, null); - } - - @Test - public void mapEnumSmallerUnknown() throws SchemaValidationException, IOException { - String actual = doMap(LARGE_ENUM_SCHEMA, UNKNOWN_ENUM_SCHEMA, "{\"e\":\"C\"}"); - assertEquals("{\"e\":\"UNKNOWN\"}", actual); - } - - - @Test - public void mapEnumSmallerDefault() throws SchemaValidationException, IOException { - String actual = doMap(LARGE_ENUM_SCHEMA, DEFAULT_ENUM_SCHEMA, "{\"e\":\"C\"}"); - assertEquals("{\"e\":\"A\"}", actual); - } - - @Test - public void mapAll() throws SchemaValidationException, IOException { - String actual = doMap(ALL_TYPES_SCHEMA, ALL_TYPES_ALT_SCHEMA, "{" + - "\"e\":\"A\"," + - "\"i\":1," + - "\"l\":2," + - "\"d\":3.0," + - "\"f\":4.0," + - "\"sI\":\"5\"," + - "\"sD\":\"6.5\"," + - "\"sU\":null," + - "\"sUi\":{\"string\":\"7\"}," + - "\"sUe\":null," + - "\"uS\":\"s\"," + - "\"se2\":\"B\"," + - "\"se3\":\"g\"," + - "\"a\":[1,2]," + - "\"m\":{\"a\":9}," + - "\"fS\":\"ab\"," + - "\"bS\":\"ab\"," + - "\"fb\":\"ab\"," + - "\"bf\":\"ab\"," + - "\"bfd\":\"abc\"," + - "\"unmapped\":10}"); - - assertEquals("{" + - "\"e\":\"A\"," + - "\"i\":1," + - "\"l\":2.0," + - "\"d\":3.0," + - "\"f\":4.0," + - "\"sI\":5," + - "\"sD\":6.5," + - "\"sU\":\"\"," + - "\"sUi\":{\"int\":7}," + - "\"sUe\":\"A\"," + - "\"uS\":{\"string\":\"s\"}," + - "\"se2\":\"B\"," + - "\"se3\":\"A\"," + - "\"a\":[1.0,2.0]," + - "\"m\":{\"a\":9.0}," + - "\"fS\":\"YWI=\"," + - "\"bS\":\"YWI=\"," + - "\"fb\":\"ab\"," + 
- "\"bf\":\"ab\"," + - "\"bfd\":\"aa\"" + - "}", actual); - } - - private String doMap(Schema from, Schema to, String value) - throws IOException, SchemaValidationException { - AvroDataMapper mapper = factory.createMapper(from, to, null); - - GenericDatumReader reader = new GenericDatumReader<>(from); - JsonDecoder decoder = DecoderFactory.get().jsonDecoder(from, value); - Object readValue = reader.read(null, decoder); - - GenericDatumWriter writer = new GenericDatumWriter<>(to); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - JsonEncoder encoder = EncoderFactory.get().jsonEncoder(to, out); - writer.write(mapper.convertAvro(readValue), encoder); - encoder.flush(); - return out.toString("utf-8"); - } -} diff --git a/radar-commons/src/test/java/org/radarbase/producer/rest/BinaryRecordContentTest.kt b/radar-commons/src/test/java/org/radarbase/producer/rest/BinaryRecordContentTest.kt new file mode 100644 index 00000000..f932a3cc --- /dev/null +++ b/radar-commons/src/test/java/org/radarbase/producer/rest/BinaryRecordContentTest.kt @@ -0,0 +1,176 @@ +package org.radarbase.producer.rest + +import io.ktor.http.content.* +import io.ktor.util.* +import io.ktor.utils.io.* +import kotlinx.coroutines.ExperimentalCoroutinesApi +import kotlinx.coroutines.launch +import kotlinx.coroutines.test.runTest +import org.apache.avro.SchemaValidationException +import org.apache.avro.io.BinaryEncoder +import org.apache.avro.io.EncoderFactory +import org.apache.avro.specific.SpecificDatumWriter +import org.junit.jupiter.api.Assertions.assertArrayEquals +import org.junit.jupiter.api.Test +import org.radarbase.data.AvroRecordData +import org.radarbase.producer.schema.ParsedSchemaMetadata +import org.radarbase.topic.AvroTopic +import org.radarcns.kafka.ObservationKey +import org.radarcns.kafka.RecordSet +import org.radarcns.passive.empatica.EmpaticaE4BloodVolumePulse +import org.radarcns.passive.phone.PhoneAcceleration +import java.io.BufferedReader +import 
java.io.ByteArrayOutputStream +import java.io.IOException +import java.io.InputStreamReader +import java.nio.ByteBuffer +import java.util.zip.GZIPOutputStream + +@OptIn(ExperimentalCoroutinesApi::class) +class BinaryRecordContentTest { + @Test + @Throws(SchemaValidationException::class, IOException::class) + fun writeToStream() = runTest { + val k = ObservationKey("test", "a", "b") + val v = EmpaticaE4BloodVolumePulse( + 0.0, + 0.0, + 0.0f, + ) + val t = AvroTopic( + "t", + k.schema, + v.schema, + k.javaClass, + v.javaClass, + ) + val request = BinaryRecordContent( + AvroRecordData(t, k, listOf(v)), + ParsedSchemaMetadata(2, 1, k.schema), + ParsedSchemaMetadata(4, 2, v.schema), + ) + + val channel = ByteChannel() + launch { + val content = request.createContent() as OutgoingContent.WriteChannelContent + content.writeTo(channel) + channel.close() + } + assertArrayEquals(EXPECTED, channel.toByteArray()) + } + + @Test + @Throws(IOException::class) + fun expectedMatchesRecordSet() { + val recordSet = RecordSet.newBuilder() + .setKeySchemaVersion(1) + .setValueSchemaVersion(2) + .setData( + listOf( + ByteBuffer.wrap( + byteArrayOf( + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ), + ), + ), + ) + .setProjectId(null) + .setUserId(null) + .setSourceId("b") + .build() + val writer = SpecificDatumWriter(RecordSet.`SCHEMA$`) + val out = ByteArrayOutputStream() + val encoder = EncoderFactory.get().binaryEncoder(out, null) + writer.write(recordSet, encoder) + encoder.flush() + assertArrayEquals(EXPECTED, out.toByteArray()) + } + + @Test + @Throws(IOException::class) + fun testSize() { + val writer = SpecificDatumWriter(PhoneAcceleration.`SCHEMA$`) + val records: MutableList = ArrayList(540) + requireNotNull(BinaryRecordContentTest::class.java.getResourceAsStream("android_phone_acceleration.csv")) + .use { stream -> + InputStreamReader(stream).use { reader -> + BufferedReader(reader).use { br -> + var line = br.readLine() + 
var encoder: BinaryEncoder? = null + while (line != null) { + val values = line.split(",".toRegex()).dropLastWhile { it.isEmpty() } + .toTypedArray() + val acc = PhoneAcceleration( + values[0].toDouble(), + values[1].toDouble(), + values[2].toFloat(), + values[3].toFloat(), + values[4].toFloat(), + ) + val out = ByteArrayOutputStream() + encoder = EncoderFactory.get().binaryEncoder(out, encoder) + writer.write(acc, encoder) + encoder.flush() + records.add(ByteBuffer.wrap(out.toByteArray())) + line = br.readLine() + } + } + } + } + val recordSet = RecordSet.newBuilder() + .setKeySchemaVersion(1) + .setValueSchemaVersion(2) + .setData(records) + .setProjectId(null) + .setUserId(null) + .setSourceId("596740ca-5875-4c97-87ab-a08405f36aff") + .build() + val recordWriter = SpecificDatumWriter(RecordSet.`SCHEMA$`) + val out = ByteArrayOutputStream() + val encoder = EncoderFactory.get().binaryEncoder(out, null) + recordWriter.write(recordSet, encoder) + encoder.flush() + println("Size of record set with " + records.size + " entries: " + out.size()) + val gzippedOut = ByteArrayOutputStream() + val gzipOut = GZIPOutputStream(gzippedOut) + gzipOut.write(out.size()) + gzipOut.close() + println("Gzipped size of record set with " + records.size + " entries: " + gzippedOut.size()) + } + + companion object { + // note that positive numbers are multiplied by two in avro binary encoding, due to the + // zig-zag encoding schema used. 
+ // See http://avro.apache.org/docs/1.8.1/spec.html#binary_encoding + private val EXPECTED = byteArrayOf( + 2, // key version x2 + 4, // value version x2 + 0, // null project ID + 0, // null user ID + 2, 'b'.code.toByte(), // string length x2, sourceId + 2, // number of records x2 + 40, // number of bytes in the first value x2 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // value + 0, // end of array + ) + } +} diff --git a/radar-commons/src/test/java/org/radarbase/producer/rest/BinaryRecordRequestTest.java b/radar-commons/src/test/java/org/radarbase/producer/rest/BinaryRecordRequestTest.java deleted file mode 100644 index 36e888b8..00000000 --- a/radar-commons/src/test/java/org/radarbase/producer/rest/BinaryRecordRequestTest.java +++ /dev/null @@ -1,139 +0,0 @@ -package org.radarbase.producer.rest; - -import static org.junit.Assert.assertArrayEquals; - -import java.io.BufferedReader; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.zip.GZIPOutputStream; -import okio.Buffer; -import org.apache.avro.SchemaValidationException; -import org.apache.avro.io.BinaryEncoder; -import org.apache.avro.io.EncoderFactory; -import org.apache.avro.specific.SpecificDatumWriter; -import org.junit.Test; -import org.radarbase.data.AvroRecordData; -import org.radarbase.topic.AvroTopic; -import org.radarcns.kafka.ObservationKey; -import org.radarcns.kafka.RecordSet; -import org.radarcns.passive.empatica.EmpaticaE4BloodVolumePulse; -import org.radarcns.passive.phone.PhoneAcceleration; - -public class BinaryRecordRequestTest { - - // note that positive numbers are multiplied by two in avro binary encoding, due to the - // zig-zag encoding schema used. 
- // See http://avro.apache.org/docs/1.8.1/spec.html#binary_encoding - private static final byte[] EXPECTED = { - 2, // key version x2 - 4, // value version x2 - 0, // null project ID - 0, // null user ID - 2, (byte)'b', // string length x2, sourceId - 2, // number of records x2 - 40, // number of bytes in the first value x2 - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // value - 0 // end of array - }; - - @Test - public void writeToStream() throws SchemaValidationException, IOException { - ObservationKey k = new ObservationKey("test", "a", "b"); - EmpaticaE4BloodVolumePulse v = new EmpaticaE4BloodVolumePulse(0.0, 0.0, - 0.0f); - - AvroTopic t = new AvroTopic<>( - "t", k.getSchema(), v.getSchema(), k.getClass(), v.getClass()); - - BinaryRecordRequest request = new BinaryRecordRequest<>(t); - request.prepare( - new ParsedSchemaMetadata(2, 1, k.getSchema()), - new ParsedSchemaMetadata(4, 2, v.getSchema()), - new AvroRecordData<>(t, k, Collections.singletonList(v))); - - - Buffer buffer = new Buffer(); - request.writeToSink(buffer); - assertArrayEquals(EXPECTED, buffer.readByteArray()); - } - - @Test - public void expectedMatchesRecordSet() throws IOException { - RecordSet recordSet = RecordSet.newBuilder() - .setKeySchemaVersion(1) - .setValueSchemaVersion(2) - .setData(Collections.singletonList(ByteBuffer.wrap(new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}))) - .setProjectId(null) - .setUserId(null) - .setSourceId("b") - .build(); - - SpecificDatumWriter writer = new SpecificDatumWriter<>(RecordSet.SCHEMA$); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null); - writer.write(recordSet, encoder); - encoder.flush(); - - assertArrayEquals(EXPECTED, out.toByteArray()); - } - - @Test - public void testSize() throws IOException { - - - SpecificDatumWriter writer = new SpecificDatumWriter<>(PhoneAcceleration.SCHEMA$); - - List records = new 
ArrayList<>(540); - try (InputStream stream = BinaryRecordRequestTest.class.getResourceAsStream("android_phone_acceleration.csv"); - InputStreamReader reader = new InputStreamReader(stream); - BufferedReader br = new BufferedReader(reader)) { - - String line = br.readLine(); - BinaryEncoder encoder = null; - - while (line != null) { - String[] values = line.split(","); - PhoneAcceleration acc = new PhoneAcceleration(Double.parseDouble(values[0]), - Double.parseDouble(values[1]), Float.parseFloat(values[2]), - Float.parseFloat(values[3]), Float.parseFloat(values[4])); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - encoder = EncoderFactory.get().binaryEncoder(out, encoder); - writer.write(acc, encoder); - encoder.flush(); - records.add(ByteBuffer.wrap(out.toByteArray())); - - line = br.readLine(); - } - } - - RecordSet recordSet = RecordSet.newBuilder() - .setKeySchemaVersion(1) - .setValueSchemaVersion(2) - .setData(records) - .setProjectId(null) - .setUserId(null) - .setSourceId("596740ca-5875-4c97-87ab-a08405f36aff") - .build(); - - SpecificDatumWriter recordWriter = new SpecificDatumWriter<>(RecordSet.SCHEMA$); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null); - recordWriter.write(recordSet, encoder); - encoder.flush(); - - System.out.println("Size of record set with " + records.size() + " entries: " + out.size()); - - ByteArrayOutputStream gzippedOut = new ByteArrayOutputStream(); - GZIPOutputStream gzipOut = new GZIPOutputStream(gzippedOut); - gzipOut.write(out.size()); - gzipOut.close(); - - System.out.println("Gzipped size of record set with " + records.size() + " entries: " + gzippedOut.size()); - } -} diff --git a/radar-commons/src/test/java/org/radarbase/producer/rest/ConnectionStateTest.java b/radar-commons/src/test/java/org/radarbase/producer/rest/ConnectionStateTest.java deleted file mode 100644 index 2dd6f271..00000000 --- 
a/radar-commons/src/test/java/org/radarbase/producer/rest/ConnectionStateTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import static org.junit.Assert.assertEquals; -import static org.radarbase.producer.rest.ConnectionState.State.CONNECTED; -import static org.radarbase.producer.rest.ConnectionState.State.DISCONNECTED; -import static org.radarbase.producer.rest.ConnectionState.State.UNKNOWN; - -import java.util.concurrent.TimeUnit; -import org.junit.Test; - -public class ConnectionStateTest { - @Test - public void getState() throws Exception { - ConnectionState state = new ConnectionState(10, TimeUnit.MILLISECONDS); - assertEquals(UNKNOWN, state.getState()); - state.didConnect(); - assertEquals(CONNECTED, state.getState()); - state.didDisconnect(); - assertEquals(DISCONNECTED, state.getState()); - Thread.sleep(15); - assertEquals(DISCONNECTED, state.getState()); - state.didConnect(); - assertEquals(CONNECTED, state.getState()); - Thread.sleep(10); - assertEquals(UNKNOWN, state.getState()); - state.setTimeout(25, TimeUnit.MILLISECONDS); - state.didConnect(); - assertEquals(CONNECTED, state.getState()); - Thread.sleep(10); - assertEquals(CONNECTED, state.getState()); - Thread.sleep(15); - assertEquals(UNKNOWN, state.getState()); - } -} diff --git 
a/radar-commons/src/test/java/org/radarbase/producer/rest/ConnectionStateTest.kt b/radar-commons/src/test/java/org/radarbase/producer/rest/ConnectionStateTest.kt new file mode 100644 index 00000000..e5b75f4e --- /dev/null +++ b/radar-commons/src/test/java/org/radarbase/producer/rest/ConnectionStateTest.kt @@ -0,0 +1,57 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer.rest + +import kotlinx.coroutines.cancel +import kotlinx.coroutines.delay +import kotlinx.coroutines.flow.first +import kotlinx.coroutines.runBlocking +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.Timeout +import java.util.concurrent.TimeUnit +import kotlin.time.Duration.Companion.milliseconds + +class ConnectionStateTest { + @Test + @Timeout(1, unit = TimeUnit.SECONDS) + fun testState() = runBlocking { + var state = ConnectionState(10.milliseconds) + state.assertEqualTo(ConnectionState.State.UNKNOWN) + state.didConnect() + state.assertEqualTo(ConnectionState.State.CONNECTED) + state.didDisconnect() + state.assertEqualTo(ConnectionState.State.DISCONNECTED) + delay(15.milliseconds) + state.assertEqualTo(ConnectionState.State.DISCONNECTED) + state.didConnect() + delay(15.milliseconds) + state.assertEqualTo(ConnectionState.State.UNKNOWN) + state.scope.cancel() + state = ConnectionState(25.milliseconds) + state.didConnect() + 
state.assertEqualTo(ConnectionState.State.CONNECTED) + delay(10.milliseconds) + state.assertEqualTo(ConnectionState.State.CONNECTED) + delay(20.milliseconds) + state.assertEqualTo(ConnectionState.State.UNKNOWN) + state.scope.cancel() + } + + private suspend inline fun ConnectionState.assertEqualTo(expected: ConnectionState.State) { + assertEquals(expected, state.first()) + } +} diff --git a/radar-commons/src/test/java/org/radarbase/producer/rest/RestClientTest.java b/radar-commons/src/test/java/org/radarbase/producer/rest/RestClientTest.java deleted file mode 100644 index 0c983019..00000000 --- a/radar-commons/src/test/java/org/radarbase/producer/rest/RestClientTest.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.producer.rest; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.net.URL; -import java.util.concurrent.TimeUnit; -import okhttp3.HttpUrl; -import okhttp3.Request; -import okhttp3.Response; -import okhttp3.mockwebserver.MockResponse; -import okhttp3.mockwebserver.MockWebServer; -import okhttp3.mockwebserver.RecordedRequest; -import org.junit.Before; -import org.junit.Test; -import org.radarbase.config.ServerConfig; - -public class RestClientTest { - private MockWebServer server; - private ServerConfig config; - private RestClient client; - - @Before - public void setUp() { - server = new MockWebServer(); - config = new ServerConfig(server.url("base").url()); - client = RestClient.newClient() - .server(config) - .timeout(1, TimeUnit.SECONDS) - .build(); - } - - @Test - public void request() throws Exception { - server.enqueue(new MockResponse().setBody("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}")); - Request request = client.requestBuilder("myPath").build(); - try (Response response = client.request(request)) { - assertTrue(response.isSuccessful()); - assertEquals("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}", response.body().string()); - } - RecordedRequest recordedRequest = server.takeRequest(); - assertEquals("GET", recordedRequest.getMethod()); - assertEquals("/base/myPath", recordedRequest.getPath()); - } - - @Test - public void requestStringPath() throws Exception { - server.enqueue(new MockResponse().setBody("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}")); - try (Response response = client.request("myPath")) { - assertTrue(response.isSuccessful()); - assertEquals("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}", response.body().string()); - } - RecordedRequest recordedRequest = server.takeRequest(); - assertEquals("GET", recordedRequest.getMethod()); - assertEquals("/base/myPath", recordedRequest.getPath()); - } - - @Test - 
public void requestString() throws Exception { - server.enqueue(new MockResponse().setBody("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}")); - String response = client.requestString(client.requestBuilder("myPath").build()); - assertEquals("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}", response); - RecordedRequest recordedRequest = server.takeRequest(); - assertEquals("GET", recordedRequest.getMethod()); - assertEquals("/base/myPath", recordedRequest.getPath()); - } - - @Test(expected = RestException.class) - public void requestStringEmpty() throws Exception { - server.enqueue(new MockResponse().setResponseCode(500)); - client.requestString(client.requestBuilder("myPath").build()); - } - - @Test - public void requestBuilder() throws Exception { - Request.Builder builder = client.requestBuilder("myPath"); - Request request = builder.build(); - assertEquals(request.url(), HttpUrl.get(new URL(config.getUrl(), "myPath"))); - } - - @Test - public void getRelativeUrl() throws Exception { - HttpUrl url = client.getRelativeUrl("myPath"); - assertEquals(server.getHostName(), url.host()); - assertEquals(server.getPort(), url.port()); - assertEquals("http", url.scheme()); - assertEquals("/base/myPath", url.encodedPath()); - } -} \ No newline at end of file diff --git a/radar-commons/src/test/java/org/radarbase/producer/rest/RestKafkaSenderTest.kt b/radar-commons/src/test/java/org/radarbase/producer/rest/RestKafkaSenderTest.kt new file mode 100644 index 00000000..85418c75 --- /dev/null +++ b/radar-commons/src/test/java/org/radarbase/producer/rest/RestKafkaSenderTest.kt @@ -0,0 +1,339 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer.rest + +import com.fasterxml.jackson.core.JsonFactory +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.node.JsonNodeType +import io.ktor.util.* +import kotlinx.coroutines.ExperimentalCoroutinesApi +import kotlinx.coroutines.test.runTest +import okhttp3.mockwebserver.MockResponse +import okhttp3.mockwebserver.MockWebServer +import org.apache.avro.SchemaValidationException +import org.apache.avro.io.DecoderFactory +import org.apache.avro.specific.SpecificDatumReader +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions.* +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.mockito.kotlin.* +import org.radarbase.data.AvroRecordData +import org.radarbase.producer.AuthenticationException +import org.radarbase.producer.rest.RestKafkaSender.Companion.restKafkaSender +import org.radarbase.producer.schema.ParsedSchemaMetadata +import org.radarbase.producer.schema.SchemaRetriever +import org.radarbase.topic.AvroTopic +import org.radarcns.kafka.ObservationKey +import org.radarcns.kafka.RecordSet +import org.radarcns.passive.phone.PhoneLight +import org.slf4j.LoggerFactory +import java.io.IOException +import java.nio.charset.StandardCharsets +import java.util.* +import java.util.zip.GZIPInputStream + +@OptIn(ExperimentalCoroutinesApi::class) +class RestKafkaSenderTest { + private lateinit var retriever: SchemaRetriever + private lateinit var sender: RestKafkaSender + private 
lateinit var webServer: MockWebServer + + @BeforeEach + fun setUp() { + webServer = MockWebServer().apply { + start() + } + retriever = mock() + sender = restKafkaSender { + baseUrl = webServer.url("/").toUrl().toExternalForm() + httpClient() + schemaRetriever = retriever + } + } + + @AfterEach + fun tearDown() { + webServer.close() + } + + @Test + @Throws(Exception::class) + fun sender() = runTest { + sender = sender.config { + scope = this@runTest + with(headers) { + append("Cookie", "ab") + append("Cookie", "bc") + } + } + val keySchema = ObservationKey.getClassSchema() + val valueSchema = PhoneLight.getClassSchema() + val topic = AvroTopic( + "test", + keySchema, + valueSchema, + ObservationKey::class.java, + PhoneLight::class.java, + ) + val topicSender = sender.sender(topic) + val key = ObservationKey("test", "a", "b") + val value = PhoneLight(0.1, 0.2, 0.3f) + val keySchemaMetadata = ParsedSchemaMetadata(10, 2, keySchema) + val valueSchemaMetadata = ParsedSchemaMetadata(10, 2, valueSchema) + retriever.stub { + onBlocking { metadata("test", false, keySchema) }.doReturn(keySchemaMetadata) + onBlocking { metadata("test", true, valueSchema) }.doReturn(valueSchemaMetadata) + } + webServer.enqueueJson("{\"offset\": 100}") + topicSender.send(key, value) + verify(retriever, times(1)) + .metadata("test", false, keySchema) + verify(retriever, times(1)) + .metadata("test", true, valueSchema) + val request = webServer.takeRequest() + assertEquals("/topics/test", request.path) + val body = READER.readTree(request.body.inputStream()) + assertEquals(10, body["key_schema_id"].asInt().toLong()) + assertEquals(10, body["value_schema_id"].asInt().toLong()) + val records = body["records"] + assertEquals(JsonNodeType.ARRAY, records.nodeType) + assertEquals(1, records.size().toLong()) + checkChildren(records) + val receivedHeaders = request.headers + assertEquals(listOf("ab,bc"), receivedHeaders.values("Cookie")) + } + + @Test + @Throws(Exception::class) + fun sendBinary() = 
runTest { + sender = sender.config { + scope = this@runTest + contentType = RestKafkaSender.KAFKA_REST_BINARY_ENCODING + } + val keySchema = ObservationKey.getClassSchema() + val valueSchema = PhoneLight.getClassSchema() + val topic = AvroTopic( + "test", + keySchema, + valueSchema, + ObservationKey::class.java, + PhoneLight::class.java, + ) + val topicSender = sender.sender(topic) + val key = ObservationKey("test", "a", "b") + val value = PhoneLight(0.1, 0.2, 0.3f) + val keySchemaMetadata = ParsedSchemaMetadata(10, 2, keySchema) + val valueSchemaMetadata = ParsedSchemaMetadata(10, 2, valueSchema) + retriever.stub { + onBlocking { metadata("test", false, keySchema) }.doReturn(keySchemaMetadata) + onBlocking { metadata("test", true, valueSchema) }.doReturn(valueSchemaMetadata) + } + webServer.enqueueJson("{\"offset\": 100}") + topicSender.send(key, value) + verify(retriever, times(1)) + .metadata("test", false, keySchema) + verify(retriever, times(1)) + .metadata("test", true, valueSchema) + val request = webServer.takeRequest() + assertEquals("/topics/test", request.path) + var decoder = DecoderFactory.get().directBinaryDecoder(request.body.inputStream(), null) + val recordSetDatumReader = SpecificDatumReader(RecordSet.getClassSchema()) + val recordSet = recordSetDatumReader.read(null, decoder) + assertNull(recordSet.userId) + assertEquals("b", recordSet.sourceId) + assertEquals(2, recordSet.keySchemaVersion) + assertEquals(2, recordSet.valueSchemaVersion) + assertEquals(1, recordSet.data.size) + decoder = DecoderFactory.get().directBinaryDecoder(recordSet.data[0].moveToByteArray().inputStream(), decoder) + val phoneLightDatumReader = SpecificDatumReader(PhoneLight.getClassSchema()) + val decodedValue = phoneLightDatumReader.read(null, decoder) + assertEquals(value, decodedValue) + } + + @Test + @Throws(Exception::class) + fun sendTwo() = runTest { + sender = sender.config { + scope = this@runTest + } + val keySchema = ObservationKey.getClassSchema() + val 
valueSchema = PhoneLight.getClassSchema() + val topic = AvroTopic( + "test", + keySchema, + valueSchema, + ObservationKey::class.java, + PhoneLight::class.java, + ) + val topicSender = sender.sender(topic) + val key = ObservationKey("test", "a", "b") + val value = PhoneLight(0.1, 0.2, 0.3f) + val keySchemaMetadata = ParsedSchemaMetadata(10, 2, keySchema) + val valueSchemaMetadata = ParsedSchemaMetadata(10, 2, valueSchema) + + retriever.stub { + onBlocking { metadata("test", false, keySchema) }.doReturn(keySchemaMetadata) + onBlocking { metadata("test", true, valueSchema) }.doReturn(valueSchemaMetadata) + } + webServer.enqueueJson("{\"offset\": 100}") + topicSender.send(AvroRecordData(topic, key, listOf(value, value))) + verify(retriever, times(1)) + .metadata("test", false, keySchema) + verify(retriever, times(1)) + .metadata("test", true, valueSchema) + val request = webServer.takeRequest() + assertEquals("/topics/test", request.path) + val bodyString = request.body.readString(StandardCharsets.UTF_8) + logger.info("Reading: {}", bodyString) + val body = READER.readTree(bodyString) + assertEquals(10, body["key_schema_id"].asInt().toLong()) + assertEquals(10, body["value_schema_id"].asInt().toLong()) + val records = body["records"] + assertEquals(JsonNodeType.ARRAY, records.nodeType) + assertEquals(2, records.size().toLong()) + checkChildren(records) + } + + @Test + @Throws(Exception::class) + fun resetConnection() = runTest { + sender = sender.config { + scope = this@runTest + } + var nRequests = 0 + webServer.enqueue(MockResponse().setResponseCode(500)) + assertFalse(sender.resetConnection()) + assertEquals(++nRequests, webServer.requestCount) + var request = webServer.takeRequest() + assertEquals("/", request.path) + assertEquals("HEAD", request.method) + webServer.enqueue(MockResponse()) + assertEquals(nRequests, webServer.requestCount) + assertTrue(sender.resetConnection()) + assertEquals(++nRequests, webServer.requestCount) + request = webServer.takeRequest() 
+ assertEquals("/", request.path) + assertEquals("HEAD", request.method) + } + + @Test + @Throws(Exception::class) + fun resetConnectionUnauthorized() = runTest { + sender = sender.config { + scope = this@runTest + } + webServer.enqueue(MockResponse().setResponseCode(401)) + webServer.enqueue(MockResponse().setResponseCode(401)) + try { + sender.resetConnection() + fail("Authentication exception expected") + } catch (ex: AuthenticationException) { + assertEquals(1, webServer.requestCount.toLong()) + // success + } + try { + sender.resetConnection() + fail("Authentication exception expected") + } catch (ex: AuthenticationException) { + assertEquals(2, webServer.requestCount.toLong()) + // success + } + webServer.enqueue(MockResponse().setResponseCode(200)) + try { + assertTrue(sender.resetConnection()) + assertEquals(3, webServer.requestCount.toLong()) + } catch (ex: AuthenticationException) { + fail("Unexpected authentication failure") + } + } + + @Test + @Throws( + IOException::class, + InterruptedException::class, + SchemaValidationException::class, + ) + fun withCompression() = runTest { + sender = sender.config { + contentEncoding = RestKafkaSender.GZIP_CONTENT_ENCODING + } + webServer.enqueueJson("{\"offset\": 100}") + val keySchema = ObservationKey.getClassSchema() + val valueSchema = PhoneLight.getClassSchema() + val topic = AvroTopic( + "test", + keySchema, + valueSchema, + ObservationKey::class.java, + PhoneLight::class.java, + ) + val topicSender = sender.sender(topic) + val key = ObservationKey("test", "a", "b") + val value = PhoneLight(0.1, 0.2, 0.3f) + val keySchemaMetadata = ParsedSchemaMetadata(10, 2, keySchema) + val valueSchemaMetadata = ParsedSchemaMetadata(10, 2, valueSchema) + retriever.stub { + onBlocking { metadata("test", false, keySchema) }.doReturn(keySchemaMetadata) + onBlocking { metadata("test", true, valueSchema) }.doReturn(valueSchemaMetadata) + } + topicSender.send(key, value) + + val request = webServer.takeRequest() + 
assertEquals("gzip", request.getHeader("Content-Encoding")) + request.body.inputStream().use { `in` -> + GZIPInputStream(`in`).use { gzipIn -> + val body = READER.readTree(gzipIn) + assertEquals(10, body["key_schema_id"].asInt().toLong()) + assertEquals(10, body["value_schema_id"].asInt().toLong()) + val records = body["records"] + assertEquals(JsonNodeType.ARRAY, records.nodeType) + assertEquals(1, records.size().toLong()) + checkChildren(records) + } + } + } + + companion object { + private val logger = LoggerFactory.getLogger(RestKafkaSenderTest::class.java) + private val FACTORY = JsonFactory() + private val READER = ObjectMapper(FACTORY).reader() + private fun checkChildren(records: JsonNode) { + for (child in records) { + val jsonKey = child["key"] + assertEquals(JsonNodeType.OBJECT, jsonKey.nodeType) + assertEquals("a", jsonKey["userId"].asText()) + assertEquals("b", jsonKey["sourceId"].asText()) + val jsonValue = child["value"] + assertEquals(JsonNodeType.OBJECT, jsonValue.nodeType) + assertEquals(0.1, jsonValue["time"].asDouble(), 0.0) + assertEquals(0.2, jsonValue["timeReceived"].asDouble(), 0.0) + assertEquals(0.3f, jsonValue["light"].asDouble().toFloat(), 0f) + } + } + + fun MockWebServer.enqueueJson( + body: String, + builder: MockResponse.() -> Unit = {}, + ) = enqueue( + MockResponse() + .setBody(body) + .setHeader("Content-Type", "application/json; charset=utf-8") + .apply(builder), + ) + } +} diff --git a/radar-commons/src/test/java/org/radarbase/producer/rest/RestSenderTest.java b/radar-commons/src/test/java/org/radarbase/producer/rest/RestSenderTest.java deleted file mode 100644 index b244f41b..00000000 --- a/radar-commons/src/test/java/org/radarbase/producer/rest/RestSenderTest.java +++ /dev/null @@ -1,284 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.producer.rest; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.ObjectReader; -import com.fasterxml.jackson.databind.node.JsonNodeType; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; -import java.util.zip.GZIPInputStream; -import okhttp3.Headers; -import okhttp3.mockwebserver.MockResponse; -import okhttp3.mockwebserver.MockWebServer; -import okhttp3.mockwebserver.RecordedRequest; -import org.apache.avro.Schema; -import org.apache.avro.SchemaValidationException; -import org.json.JSONException; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.radarbase.config.ServerConfig; -import org.radarbase.data.AvroRecordData; -import org.radarbase.producer.AuthenticationException; -import org.radarbase.producer.KafkaTopicSender; -import org.radarbase.topic.AvroTopic; -import org.radarcns.kafka.ObservationKey; -import org.radarcns.passive.phone.PhoneLight; - -public class RestSenderTest { - private static final JsonFactory FACTORY = new JsonFactory(); - private static final 
ObjectReader READER = new ObjectMapper(FACTORY).reader(); - private SchemaRetriever retriever; - private RestSender sender; - - @Rule - public MockWebServer webServer = new MockWebServer(); - - @Before - public void setUp() { - this.retriever = mock(SchemaRetriever.class); - - ServerConfig config = new ServerConfig(webServer.url("/").url()); - - RestClient client = RestClient.newClient() - .server(config) - .build(); - - this.sender = new RestSender.Builder() - .httpClient(client) - .schemaRetriever(retriever) - .build(); - } - - @Test - public void sender() throws Exception { - Schema keySchema = ObservationKey.getClassSchema(); - Schema valueSchema = PhoneLight.getClassSchema(); - AvroTopic topic = new AvroTopic<>("test", - keySchema, valueSchema, ObservationKey.class, PhoneLight.class); - Headers headers = new Headers.Builder() - .add("Cookie: ab") - .add("Cookie: bc") - .build(); - sender.setHeaders(headers); - KafkaTopicSender topicSender = sender.sender(topic); - - ObservationKey key = new ObservationKey("test","a", "b"); - PhoneLight value = new PhoneLight(0.1, 0.2, 0.3f); - ParsedSchemaMetadata keySchemaMetadata = new ParsedSchemaMetadata(10, 2, keySchema); - ParsedSchemaMetadata valueSchemaMetadata = new ParsedSchemaMetadata(10, 2, valueSchema); - - when(retriever - .getOrSetSchemaMetadata("test", false, keySchema, -1)) - .thenReturn(keySchemaMetadata); - when(retriever - .getOrSetSchemaMetadata("test", true, valueSchema, -1)) - .thenReturn(valueSchemaMetadata); - - webServer.enqueue(new MockResponse() - .setHeader("Content-Type", "application/json; charset=utf-8") - .setBody("{\"offset\": 100}")); - - topicSender.send(key, value); - - verify(retriever, times(1)) - .getOrSetSchemaMetadata("test", false, keySchema, -1); - verify(retriever, times(1)) - .getOrSetSchemaMetadata("test", true, valueSchema, -1); - - RecordedRequest request = webServer.takeRequest(); - assertEquals("/topics/test", request.getPath()); - JsonNode body = 
READER.readTree(request.getBody().inputStream()); - assertEquals(10, body.get("key_schema_id").asInt()); - assertEquals(10, body.get("value_schema_id").asInt()); - JsonNode records = body.get("records"); - assertEquals(JsonNodeType.ARRAY, records.getNodeType()); - assertEquals(1, records.size()); - checkChildren(records); - Headers receivedHeaders = request.getHeaders(); - assertEquals(Arrays.asList("ab", "bc"), receivedHeaders.values("Cookie")); - } - - @Test - public void sendTwo() throws Exception { - Schema keySchema = ObservationKey.getClassSchema(); - Schema valueSchema = PhoneLight.getClassSchema(); - AvroTopic topic = new AvroTopic<>("test", - keySchema, valueSchema, ObservationKey.class, PhoneLight.class); - KafkaTopicSender topicSender = sender.sender(topic); - - ObservationKey key = new ObservationKey("test", "a", "b"); - PhoneLight value = new PhoneLight(0.1, 0.2, 0.3f); - ParsedSchemaMetadata keySchemaMetadata = new ParsedSchemaMetadata(10, 2, keySchema); - ParsedSchemaMetadata valueSchemaMetadata = new ParsedSchemaMetadata(10, 2, valueSchema); - - when(retriever - .getOrSetSchemaMetadata("test", false, keySchema, -1)) - .thenReturn(keySchemaMetadata); - when(retriever - .getOrSetSchemaMetadata("test", true, valueSchema, -1)) - .thenReturn(valueSchemaMetadata); - - webServer.enqueue(new MockResponse() - .setHeader("Content-Type", "application/json; charset=utf-8") - .setBody("{\"offset\": 100}")); - - topicSender.send(new AvroRecordData<>(topic, key, Arrays.asList(value, value))); - - verify(retriever, times(1)) - .getOrSetSchemaMetadata("test", false, keySchema, -1); - verify(retriever, times(1)) - .getOrSetSchemaMetadata("test", true, valueSchema, -1); - - RecordedRequest request = webServer.takeRequest(); - assertEquals("/topics/test", request.getPath()); - JsonNode body = READER.readTree(request.getBody().inputStream()); - assertEquals(10, body.get("key_schema_id").asInt()); - assertEquals(10, body.get("value_schema_id").asInt()); - JsonNode 
records = body.get("records"); - assertEquals(JsonNodeType.ARRAY, records.getNodeType()); - assertEquals(2, records.size()); - checkChildren(records); - } - - @Test - public void resetConnection() throws Exception { - int n_requests = 0; - - webServer.enqueue(new MockResponse().setResponseCode(500)); - assertFalse(sender.isConnected()); - assertEquals(++n_requests, webServer.getRequestCount()); - RecordedRequest request = webServer.takeRequest(); - assertEquals("/", request.getPath()); - assertEquals("HEAD", request.getMethod()); - webServer.enqueue(new MockResponse().setResponseCode(500)); - assertFalse(sender.resetConnection()); - assertEquals(++n_requests, webServer.getRequestCount()); - request = webServer.takeRequest(); - assertEquals("/", request.getPath()); - assertEquals("HEAD", request.getMethod()); - webServer.enqueue(new MockResponse()); - assertFalse(sender.isConnected()); - assertEquals(n_requests, webServer.getRequestCount()); - assertTrue(sender.resetConnection()); - assertEquals(++n_requests, webServer.getRequestCount()); - request = webServer.takeRequest(); - assertEquals("/", request.getPath()); - assertEquals("HEAD", request.getMethod()); - } - - @Test - public void resetConnectionUnauthorized() throws Exception { - webServer.enqueue(new MockResponse().setResponseCode(401)); - try { - sender.isConnected(); - fail("Authentication exception expected"); - } catch (AuthenticationException ex) { - // success - } - try { - sender.isConnected(); - fail("Authentication exception expected"); - } catch (AuthenticationException ex) { - // success - } - webServer.enqueue(new MockResponse().setResponseCode(401)); - try { - sender.resetConnection(); - fail("Authentication exception expected"); - } catch (AuthenticationException ex) { - assertEquals(2, webServer.getRequestCount()); - // success - } - webServer.enqueue(new MockResponse().setResponseCode(200)); - try { - assertTrue(sender.resetConnection()); - } catch (AuthenticationException ex) { - 
assertEquals(3, webServer.getRequestCount()); - fail("Unexpected authentication failure"); - } - } - - @Test - public void withCompression() - throws IOException, InterruptedException, SchemaValidationException, JSONException { - sender.setCompression(true); - webServer.enqueue(new MockResponse() - .setHeader("Content-Type", "application/json; charset=utf-8") - .setBody("{\"offset\": 100}")); - Schema keySchema = ObservationKey.getClassSchema(); - Schema valueSchema = PhoneLight.getClassSchema(); - AvroTopic topic = new AvroTopic<>("test", - keySchema, valueSchema, ObservationKey.class, PhoneLight.class); - KafkaTopicSender topicSender = sender.sender(topic); - - ObservationKey key = new ObservationKey("test", "a", "b"); - PhoneLight value = new PhoneLight(0.1, 0.2, 0.3f); - ParsedSchemaMetadata keySchemaMetadata = new ParsedSchemaMetadata(10, 2, keySchema); - ParsedSchemaMetadata valueSchemaMetadata = new ParsedSchemaMetadata(10, 2, valueSchema); - - when(retriever - .getOrSetSchemaMetadata("test", false, keySchema, -1)) - .thenReturn(keySchemaMetadata); - when(retriever - .getOrSetSchemaMetadata("test", true, valueSchema, -1)) - .thenReturn(valueSchemaMetadata); - - topicSender.send(key, value); - - RecordedRequest request = webServer.takeRequest(); - assertEquals("gzip", request.getHeader("Content-Encoding")); - - try (InputStream in = request.getBody().inputStream(); - GZIPInputStream gzipIn = new GZIPInputStream(in)) { - JsonNode body = READER.readTree(gzipIn); - assertEquals(10, body.get("key_schema_id").asInt()); - assertEquals(10, body.get("value_schema_id").asInt()); - JsonNode records = body.get("records"); - assertEquals(JsonNodeType.ARRAY, records.getNodeType()); - assertEquals(1, records.size()); - checkChildren(records); - } - } - - private static void checkChildren(JsonNode records) { - for (JsonNode child : records) { - JsonNode jsonKey = child.get("key"); - assertEquals(JsonNodeType.OBJECT, jsonKey.getNodeType()); - assertEquals("a", 
jsonKey.get("userId").asText()); - assertEquals("b", jsonKey.get("sourceId").asText()); - JsonNode jsonValue = child.get("value"); - assertEquals(JsonNodeType.OBJECT, jsonValue.getNodeType()); - assertEquals(0.1, jsonValue.get("time").asDouble(), 0); - assertEquals(0.2, jsonValue.get("timeReceived").asDouble(), 0); - assertEquals(0.3f, (float)jsonValue.get("light").asDouble(), 0); - } - } -} diff --git a/radar-commons/src/test/java/org/radarbase/producer/rest/SchemaRestClientTest.java b/radar-commons/src/test/java/org/radarbase/producer/rest/SchemaRestClientTest.java deleted file mode 100644 index fd10e838..00000000 --- a/radar-commons/src/test/java/org/radarbase/producer/rest/SchemaRestClientTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.producer.rest; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.concurrent.TimeUnit; -import okhttp3.mockwebserver.MockResponse; -import okhttp3.mockwebserver.MockWebServer; -import okhttp3.mockwebserver.RecordedRequest; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Field; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.radarbase.config.ServerConfig; - -public class SchemaRestClientTest { - private MockWebServer server; - private SchemaRestClient retriever; - - @Before - public void setUp() { - server = new MockWebServer(); - ServerConfig config = new ServerConfig(); - config.setProtocol("http"); - config.setHost(server.getHostName()); - config.setPort(server.getPort()); - config.setPath("base"); - retriever = new SchemaRestClient(RestClient.global() - .server(Objects.requireNonNull(config)) - .timeout(1L, TimeUnit.SECONDS) - .build()); - } - - @After - public void tearDown() throws IOException { - server.close(); - } - - @Test - public void retrieveSchemaMetadata() throws Exception { - server.enqueue(new MockResponse().setBody("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}")); - ParsedSchemaMetadata metadata = retriever.retrieveSchemaMetadata("bla-value", -1); - assertEquals(Integer.valueOf(10), metadata.getId()); - assertEquals(Integer.valueOf(2), metadata.getVersion()); - assertEquals(Schema.create(Schema.Type.STRING), metadata.getSchema()); - assertEquals("/base/subjects/bla-value/versions/latest", server.takeRequest().getPath()); - } - - - @Test - public void retrieveSchemaMetadataVersion() throws Exception { - server.enqueue(new MockResponse().setBody("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}")); - ParsedSchemaMetadata metadata = retriever.retrieveSchemaMetadata("bla-value", 
2); - assertEquals(Integer.valueOf(10), metadata.getId()); - assertEquals(Integer.valueOf(2), metadata.getVersion()); - assertEquals(Schema.create(Schema.Type.STRING), metadata.getSchema()); - assertEquals("/base/subjects/bla-value/versions/2", server.takeRequest().getPath()); - } -} diff --git a/radar-commons/src/test/java/org/radarbase/producer/rest/SchemaRetrieverTest.java b/radar-commons/src/test/java/org/radarbase/producer/rest/SchemaRetrieverTest.java deleted file mode 100644 index d78f9211..00000000 --- a/radar-commons/src/test/java/org/radarbase/producer/rest/SchemaRetrieverTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.radarbase.producer.rest; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import okhttp3.mockwebserver.MockResponse; -import okhttp3.mockwebserver.MockWebServer; -import okhttp3.mockwebserver.RecordedRequest; -import org.apache.avro.Schema; -import org.apache.avro.Schema.Field; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.radarbase.config.ServerConfig; - -public class SchemaRetrieverTest { - private MockWebServer server; - private SchemaRetriever retriever; - - @Before - public void setUp() { - server = new MockWebServer(); - ServerConfig config = new ServerConfig(); - config.setProtocol("http"); - config.setHost(server.getHostName()); - config.setPort(server.getPort()); - config.setPath("base"); - retriever = new SchemaRetriever(config, 1L); - } - - @After - public void tearDown() throws IOException { - server.close(); - } - - @Test - public void subject() { - assertEquals("bla-value", SchemaRetriever.subject("bla", true)); - assertEquals("bla-key", SchemaRetriever.subject("bla", false)); - } - - @Test - public void getSchemaMetadata() throws Exception { - server.enqueue(new MockResponse().setBody("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}")); - ParsedSchemaMetadata metadata = retriever.getBySubjectAndVersion("bla", true, 2); - assertEquals(Integer.valueOf(10), metadata.getId()); - assertEquals(Integer.valueOf(2), metadata.getVersion()); - assertEquals(Schema.create(Schema.Type.STRING), metadata.getSchema()); - assertEquals("/base/subjects/bla-value/versions/2", server.takeRequest().getPath()); - - // Already queried schema is cached and does not need another request - ParsedSchemaMetadata metadata2 = retriever.getBySubjectAndVersion("bla", true, 2); - assertEquals(Integer.valueOf(10), metadata2.getId()); - assertEquals(Integer.valueOf(2), 
metadata2.getVersion()); - assertEquals(Schema.create(Schema.Type.STRING), metadata2.getSchema()); - assertEquals(1, server.getRequestCount()); - - // Not yet queried schema needs a new request, so if the server does not respond, an - // IOException is thrown. - server.enqueue(new MockResponse().setResponseCode(500)); - assertThrows(IOException.class, () -> retriever.getBySubjectAndVersion("bla", false, 2)); - } - - @Test - public void addSchemaMetadata() throws Exception { - server.enqueue(new MockResponse().setBody("{\"id\":10}")); - int id = retriever.addSchema("bla", true, Schema.create(Schema.Type.STRING)); - assertEquals(10, id); - - assertEquals(1, server.getRequestCount()); - RecordedRequest request = server.takeRequest(); - assertEquals("{\"schema\":\"\\\"string\\\"\"}", request.getBody().readUtf8()); - - List schemaFields = Collections.singletonList( - new Field("a", Schema.create(Schema.Type.INT), "that a", 10)); - - Schema record = Schema.createRecord("C", "that C", "org.radarcns", false, schemaFields); - server.enqueue(new MockResponse().setBody("{\"id\":11}")); - id = retriever.addSchema("bla", true, record); - assertEquals(11, id); - request = server.takeRequest(); - assertEquals("{\"schema\":\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"C\\\",\\\"namespace\\\":\\\"org.radarcns\\\",\\\"doc\\\":\\\"that C\\\",\\\"fields\\\":[{\\\"name\\\":\\\"a\\\",\\\"type\\\":\\\"int\\\",\\\"doc\\\":\\\"that a\\\",\\\"default\\\":10}]}\"}", request.getBody().readUtf8()); - } - - @Test - public void getOrSetSchemaMetadataSet() throws Exception { - server.enqueue(new MockResponse().setResponseCode(404)); - server.enqueue(new MockResponse().setBody("{\"id\":10}")); - server.enqueue(new MockResponse().setBody("{\"id\":10, \"version\": 2}")); - ParsedSchemaMetadata metadata = retriever.getOrSetSchemaMetadata("bla", true, Schema.create(Schema.Type.STRING), -1); - assertEquals(Integer.valueOf(10), metadata.getId()); - assertEquals(Schema.create(Schema.Type.STRING), 
metadata.getSchema()); - - assertEquals(3, server.getRequestCount()); - server.takeRequest(); - RecordedRequest request = server.takeRequest(); - assertEquals("{\"schema\":\"\\\"string\\\"\"}", request.getBody().readUtf8()); - assertEquals("/base/subjects/bla-value/versions", request.getPath()); - - metadata = retriever.getOrSetSchemaMetadata("bla", true, Schema.create(Schema.Type.STRING), -1); - assertEquals(Integer.valueOf(10), metadata.getId()); - assertEquals(Schema.create(Schema.Type.STRING), metadata.getSchema()); - } - - @Test - public void getOrSetSchemaMetadataGet() throws Exception { - server.enqueue(new MockResponse().setBody("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}")); - ParsedSchemaMetadata metadata = retriever.getOrSetSchemaMetadata("bla", true, Schema.create(Schema.Type.STRING), 2); - assertEquals(Integer.valueOf(10), metadata.getId()); - assertEquals(Integer.valueOf(2), metadata.getVersion()); - assertEquals(Schema.create(Schema.Type.STRING), metadata.getSchema()); - - assertEquals(1, server.getRequestCount()); - RecordedRequest request = server.takeRequest(); - assertEquals("/base/subjects/bla-value/versions/2", request.getPath()); - - metadata = retriever.getOrSetSchemaMetadata("bla", true, Schema.create(Schema.Type.STRING), 2); - assertEquals(Integer.valueOf(10), metadata.getId()); - assertEquals(Schema.create(Schema.Type.STRING), metadata.getSchema()); - } -} diff --git a/radar-commons/src/test/java/org/radarbase/producer/schema/SchemaRestClientTest.kt b/radar-commons/src/test/java/org/radarbase/producer/schema/SchemaRestClientTest.kt new file mode 100644 index 00000000..c58ed1fe --- /dev/null +++ b/radar-commons/src/test/java/org/radarbase/producer/schema/SchemaRestClientTest.kt @@ -0,0 +1,78 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.producer.schema + +import io.ktor.client.* +import io.ktor.client.engine.cio.* +import io.ktor.client.plugins.* +import kotlinx.coroutines.ExperimentalCoroutinesApi +import kotlinx.coroutines.test.runTest +import okhttp3.mockwebserver.MockWebServer +import org.apache.avro.Schema +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.radarbase.producer.io.timeout +import org.radarbase.producer.rest.RestKafkaSenderTest.Companion.enqueueJson +import java.io.IOException +import java.util.* +import kotlin.time.Duration.Companion.seconds + +@OptIn(ExperimentalCoroutinesApi::class) +class SchemaRestClientTest { + private lateinit var mockServer: MockWebServer + private lateinit var retriever: SchemaRestClient + + @BeforeEach + fun setUp() { + mockServer = MockWebServer() + retriever = SchemaRestClient( + HttpClient(CIO) { + timeout(1.seconds) + }, + baseUrl = "http://${mockServer.hostName}:${mockServer.port}/base/", + ) + } + + @AfterEach + @Throws(IOException::class) + fun tearDown() { + mockServer.close() + } + + @Test + @Throws(Exception::class) + fun retrieveSchemaMetadata() = runTest { + mockServer.enqueueJson("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}") + val (id, version, schema) = retriever.retrieveSchemaMetadata("bla-value", -1) + assertEquals(10, id) + assertEquals(2, version) + assertEquals(Schema.create(Schema.Type.STRING), schema) + 
assertEquals("/base/subjects/bla-value/versions/latest", mockServer.takeRequest().path) + } + + @Test + @Throws(Exception::class) + fun retrieveSchemaMetadataVersion() = runTest { + mockServer.enqueueJson("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}") + val (id, version, schema) = retriever.retrieveSchemaMetadata("bla-value", 2) + assertEquals(10, id) + assertEquals(2, version) + assertEquals(Schema.create(Schema.Type.STRING), schema) + assertEquals("/base/subjects/bla-value/versions/2", mockServer.takeRequest().path) + } +} diff --git a/radar-commons/src/test/java/org/radarbase/producer/schema/SchemaRetrieverTest.kt b/radar-commons/src/test/java/org/radarbase/producer/schema/SchemaRetrieverTest.kt new file mode 100644 index 00000000..962e9556 --- /dev/null +++ b/radar-commons/src/test/java/org/radarbase/producer/schema/SchemaRetrieverTest.kt @@ -0,0 +1,122 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.radarbase.producer.schema + +import io.ktor.client.plugins.* +import kotlinx.coroutines.ExperimentalCoroutinesApi +import kotlinx.coroutines.test.runTest +import okhttp3.mockwebserver.MockResponse +import okhttp3.mockwebserver.MockWebServer +import org.apache.avro.Schema +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import org.radarbase.producer.io.timeout +import org.radarbase.producer.rest.RestKafkaSenderTest.Companion.enqueueJson +import org.radarbase.producer.schema.SchemaRetriever.Companion.schemaRetriever +import org.radarbase.producer.schema.SchemaRetriever.Companion.subject +import java.io.IOException +import kotlin.time.Duration.Companion.seconds + +@OptIn(ExperimentalCoroutinesApi::class) +class SchemaRetrieverTest { + private lateinit var mockServer: MockWebServer + private lateinit var retriever: SchemaRetriever + + @Suppress("HttpUrlsUsage") + @BeforeEach + fun setUp() { + mockServer = MockWebServer().apply { + start() + } + retriever = schemaRetriever("http://${mockServer.hostName}:${mockServer.port}/base/") { + httpClient { + defaultRequest { + url("http://${mockServer.hostName}:${mockServer.port}/base/") + } + timeout(1.seconds) + } + } + } + + @AfterEach + @Throws(IOException::class) + fun tearDown() { + mockServer.close() + } + + @Test + fun subject() { + assertEquals("bla-value", subject("bla", true)) + assertEquals("bla-key", subject("bla", false)) + } + + // Already queried schema is cached and does not need another request + @Test + fun testSchemaMetadata() = runTest { + // Not yet queried schema needs a new request, so if the server does not respond, an + // IOException is thrown. 
+ mockServer.enqueueJson("{\"id\":10,\"version\":2,\"schema\":\"\\\"string\\\"\"}") + val (id, version, schema) = retriever.getByVersion("bla", true, 2) + assertEquals(10, id) + assertEquals(2, version) + assertEquals(Schema.create(Schema.Type.STRING), schema) + assertEquals("/base/subjects/bla-value/versions/2", mockServer.takeRequest().path) + + // Already queried schema is cached and does not need another request + val (id1, version1, schema1) = retriever.getByVersion("bla", true, 2) + assertEquals(10, id1) + assertEquals(2, version1) + assertEquals(Schema.create(Schema.Type.STRING), schema1) + assertEquals(1, mockServer.requestCount.toLong()) + + // Not yet queried schema needs a new request, so if the server does not respond, an + // IOException is thrown. + mockServer.enqueue(MockResponse().setResponseCode(500)) + assertThrows { + retriever.getByVersion( + "bla", + false, + 2, + ) + } + } + + @Test + @Throws(Exception::class) + fun addSchemaMetadata() = runTest { + mockServer.enqueueJson("{\"id\":10}") + var id = retriever.addSchema("bla", true, Schema.create(Schema.Type.STRING)) + assertEquals(10, id.toLong()) + assertEquals(1, mockServer.requestCount.toLong()) + var request = mockServer.takeRequest() + assertEquals("{\"schema\":\"\\\"string\\\"\"}", request.body.readUtf8()) + val schemaFields = listOf( + Schema.Field("a", Schema.create(Schema.Type.INT), "that a", 10), + ) + val record = Schema.createRecord("C", "that C", "org.radarcns", false, schemaFields) + mockServer.enqueueJson("{\"id\":11}") + id = retriever.addSchema("bla", true, record) + assertEquals(11, id.toLong()) + request = mockServer.takeRequest() + assertEquals( + "{\"schema\":\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"C\\\",\\\"namespace\\\":\\\"org.radarcns\\\",\\\"doc\\\":\\\"that C\\\",\\\"fields\\\":[{\\\"name\\\":\\\"a\\\",\\\"type\\\":\\\"int\\\",\\\"doc\\\":\\\"that a\\\",\\\"default\\\":10}]}\"}", + request.body.readUtf8(), + ) + } +} diff --git 
a/radar-commons/src/test/java/org/radarbase/topic/KafkaTopicTest.java b/radar-commons/src/test/java/org/radarbase/topic/KafkaTopicTest.java deleted file mode 100644 index cf9fac39..00000000 --- a/radar-commons/src/test/java/org/radarbase/topic/KafkaTopicTest.java +++ /dev/null @@ -1,48 +0,0 @@ -package org.radarbase.topic; - -import static org.junit.Assert.assertEquals; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.UUID; -import org.junit.Test; - -public class KafkaTopicTest { - @Test(expected = IllegalArgumentException.class) - public void nullArguments() { - new KafkaTopic(null); - } - - @Test(expected = IllegalArgumentException.class) - public void invalidTopicName() { - new KafkaTopic("bla$"); - } - - - @Test - public void getName() { - KafkaTopic topic = new KafkaTopic("aba"); - assertEquals("aba", topic.getName()); - } - - - @Test - public void compare() throws Exception { - final int randomSize = 100; - List randomString = new ArrayList<>(randomSize); - List randomTopic = new ArrayList<>(randomSize); - for (int i = 0; i < randomSize; i++) { - String str = 'a' + UUID.randomUUID().toString().replace('-', '_'); - randomString.add(str); - randomTopic.add(new KafkaTopic(str)); - } - - Collections.sort(randomString); - Collections.sort(randomTopic); - - for (int i = 0; i < randomSize; i++) { - assertEquals(randomString.get(i), randomTopic.get(i).getName()); - } - } -} diff --git a/radar-commons/src/test/java/org/radarbase/topic/KafkaTopicTest.kt b/radar-commons/src/test/java/org/radarbase/topic/KafkaTopicTest.kt new file mode 100644 index 00000000..26cbefde --- /dev/null +++ b/radar-commons/src/test/java/org/radarbase/topic/KafkaTopicTest.kt @@ -0,0 +1,39 @@ +package org.radarbase.topic + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import java.util.* + +class KafkaTopicTest { + @Test + fun invalidTopicName() { + 
assertThrows { + KafkaTopic("bla$") + } + } + + @Test + fun testName() { + val topic = KafkaTopic("aba") + assertEquals("aba", topic.name) + } + + @Test + @Throws(Exception::class) + fun compare() { + val randomSize = 100 + val randomString: MutableList = ArrayList(randomSize) + val randomTopic: MutableList = ArrayList(randomSize) + for (i in 0 until randomSize) { + val str = 'a'.toString() + UUID.randomUUID().toString().replace('-', '_') + randomString.add(str) + randomTopic.add(KafkaTopic(str)) + } + randomString.sort() + randomTopic.sort() + for (i in 0 until randomSize) { + assertEquals(randomString[i], randomTopic[i].name) + } + } +} diff --git a/radar-commons/src/test/java/org/radarbase/topic/SensorTopicTest.java b/radar-commons/src/test/java/org/radarbase/topic/SensorTopicTest.java deleted file mode 100644 index 681f94f0..00000000 --- a/radar-commons/src/test/java/org/radarbase/topic/SensorTopicTest.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.topic; - -import static org.junit.Assert.assertEquals; - -import org.apache.avro.Schema; -import org.apache.avro.Schema.Type; -import org.apache.avro.SchemaBuilder; -import org.apache.avro.generic.GenericRecord; -import org.junit.Test; -import org.radarcns.kafka.ObservationKey; -import org.radarcns.passive.phone.PhoneAcceleration; - -/** - * Created by joris on 05/07/2017. 
- */ -public class SensorTopicTest { - - @Test - public void workingConstructor() { - Schema keySchema = SchemaBuilder.record("key").fields() - .name("projectId").type(Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.STRING))).withDefault(null) - .name("userId").type(Schema.create(Type.STRING)).noDefault() - .name("sourceId").type(Schema.create(Type.STRING)).noDefault() - .endRecord(); - Schema valueSchema = SchemaBuilder.record("value").fields() - .name("time").type(Schema.create(Type.DOUBLE)).noDefault() - .name("timeReceived").type(Schema.create(Type.DOUBLE)).noDefault() - .name("value").type(Schema.create(Type.DOUBLE)).noDefault() - .endRecord(); - - new SensorTopic<>("test", - keySchema, valueSchema, - GenericRecord.class, GenericRecord.class); - } - - @Test(expected = IllegalArgumentException.class) - public void nullArguments() { - new SensorTopic<>(null, null, null, null, null); - } - - @Test(expected = IllegalArgumentException.class) - public void nullArgumentsExceptName() { - new SensorTopic<>("test", null, null, null, null); - } - - @Test(expected = IllegalArgumentException.class) - public void nullName() { - Schema keySchema = SchemaBuilder.record("key").fields() - .name("userId").type(Schema.create(Type.STRING)).noDefault() - .name("sourceId").type(Schema.create(Type.STRING)).noDefault() - .endRecord(); - Schema valueSchema = SchemaBuilder.record("value").fields() - .name("time").type(Schema.create(Type.DOUBLE)).noDefault() - .name("timeReceived").type(Schema.create(Type.DOUBLE)).noDefault() - .name("value").type(Schema.create(Type.DOUBLE)).noDefault() - .endRecord(); - - new SensorTopic<>(null, - keySchema, valueSchema, - GenericRecord.class, GenericRecord.class); - } - - - @Test(expected = IllegalArgumentException.class) - public void missingUserId() { - Schema keySchema = SchemaBuilder.record("key").fields() - .name("sourceId").type(Schema.create(Type.STRING)).noDefault() - .endRecord(); - Schema valueSchema = 
SchemaBuilder.record("value").fields() - .name("time").type(Schema.create(Type.DOUBLE)).noDefault() - .name("timeReceived").type(Schema.create(Type.DOUBLE)).noDefault() - .name("value").type(Schema.create(Type.DOUBLE)).noDefault() - .endRecord(); - - new SensorTopic<>("test", - keySchema, valueSchema, - GenericRecord.class, GenericRecord.class); - } - - @Test(expected = IllegalArgumentException.class) - public void missingTime() { - Schema keySchema = SchemaBuilder.record("key").fields() - .name("userId").type(Schema.create(Type.STRING)).noDefault() - .name("sourceId").type(Schema.create(Type.STRING)).noDefault() - .endRecord(); - Schema valueSchema = SchemaBuilder.record("value").fields() - .name("timeReceived").type(Schema.create(Type.DOUBLE)).noDefault() - .name("value").type(Schema.create(Type.DOUBLE)).noDefault() - .endRecord(); - - new SensorTopic<>("test", - keySchema, valueSchema, - GenericRecord.class, GenericRecord.class); - } - - @Test(expected = IllegalArgumentException.class) - public void notARecord() { - Schema keySchema = Schema.create(Type.STRING); - Schema valueSchema = SchemaBuilder.record("value").fields() - .name("timeReceived").type(Schema.create(Type.DOUBLE)).noDefault() - .name("value").type(Schema.create(Type.DOUBLE)).noDefault() - .endRecord(); - - new SensorTopic<>("test", - keySchema, valueSchema, - GenericRecord.class, GenericRecord.class); - } - - @Test - public void parseTopic() { - SensorTopic topic = SensorTopic.parse("test", - ObservationKey.class.getName(), PhoneAcceleration.class.getName()); - - SensorTopic expected = new SensorTopic<>("test", - ObservationKey.getClassSchema(), PhoneAcceleration.getClassSchema(), - ObservationKey.class, PhoneAcceleration.class); - - assertEquals(expected, topic); - } - - @Test(expected = IllegalArgumentException.class) - public void parseUnexistingKey() { - SensorTopic.parse("test", - "unexisting." 
+ ObservationKey.class.getName(), - PhoneAcceleration.class.getName()); - } - - - @Test(expected = IllegalArgumentException.class) - public void parseUnexistingValue() { - SensorTopic.parse("test", - ObservationKey.class.getName(), - "unexisting." + PhoneAcceleration.class.getName()); - } -} diff --git a/radar-commons/src/test/java/org/radarbase/topic/SensorTopicTest.kt b/radar-commons/src/test/java/org/radarbase/topic/SensorTopicTest.kt new file mode 100644 index 00000000..f9744a57 --- /dev/null +++ b/radar-commons/src/test/java/org/radarbase/topic/SensorTopicTest.kt @@ -0,0 +1,158 @@ +/* + * Copyright 2017 The Hyve and King's College London + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.radarbase.topic + +import org.apache.avro.Schema +import org.apache.avro.SchemaBuilder +import org.apache.avro.generic.GenericRecord +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import org.radarcns.kafka.ObservationKey +import org.radarcns.passive.phone.PhoneAcceleration + +/** + * Created by joris on 05/07/2017. 
+ */ +class SensorTopicTest { + @Test + fun workingConstructor() { + val keySchema = SchemaBuilder.record("key").fields() + .name("projectId").type( + Schema.createUnion( + Schema.create(Schema.Type.NULL), + Schema.create( + Schema.Type.STRING, + ), + ), + ).withDefault(null) + .name("userId").type(Schema.create(Schema.Type.STRING)).noDefault() + .name("sourceId").type(Schema.create(Schema.Type.STRING)).noDefault() + .endRecord() + val valueSchema = SchemaBuilder.record("value").fields() + .name("time").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .name("timeReceived").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .name("value").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .endRecord() + SensorTopic( + "test", + keySchema, + valueSchema, + GenericRecord::class.java, + GenericRecord::class.java, + ) + } + + @Test + fun missingUserId() { + val keySchema = SchemaBuilder.record("key").fields() + .name("sourceId").type(Schema.create(Schema.Type.STRING)).noDefault() + .endRecord() + val valueSchema = SchemaBuilder.record("value").fields() + .name("time").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .name("timeReceived").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .name("value").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .endRecord() + + assertThrows { + SensorTopic( + "test", + keySchema, + valueSchema, + GenericRecord::class.java, + GenericRecord::class.java, + ) + } + } + + @Test + fun missingTime() { + val keySchema = SchemaBuilder.record("key").fields() + .name("userId").type(Schema.create(Schema.Type.STRING)).noDefault() + .name("sourceId").type(Schema.create(Schema.Type.STRING)).noDefault() + .endRecord() + val valueSchema = SchemaBuilder.record("value").fields() + .name("timeReceived").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .name("value").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .endRecord() + assertThrows { + SensorTopic( + "test", + keySchema, + valueSchema, + 
GenericRecord::class.java, + GenericRecord::class.java, + ) + } + } + + @Test + fun notARecord() { + val keySchema = Schema.create(Schema.Type.STRING) + val valueSchema = SchemaBuilder.record("value").fields() + .name("timeReceived").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .name("value").type(Schema.create(Schema.Type.DOUBLE)).noDefault() + .endRecord() + assertThrows { + SensorTopic( + "test", + keySchema, + valueSchema, + GenericRecord::class.java, + GenericRecord::class.java, + ) + } + } + + @Test + fun parseTopic() { + val topic: SensorTopic = SensorTopic.parse( + "test", + ObservationKey::class.java.name, + PhoneAcceleration::class.java.name, + ) + val expected = SensorTopic( + "test", + ObservationKey.getClassSchema(), + PhoneAcceleration.getClassSchema(), + ObservationKey::class.java, + PhoneAcceleration::class.java, + ) + assertEquals(expected, topic) + } + + @Test + fun parseUnexistingKey() { + assertThrows { + SensorTopic.parse( + "test", + "unexisting." + ObservationKey::class.java.name, + PhoneAcceleration::class.java.name, + ) + } + } + + @Test + fun parseUnexistingValue() { + assertThrows { + SensorTopic.parse( + "test", + ObservationKey::class.java.name, + "unexisting." 
+ PhoneAcceleration::class.java.name, + ) + } + } +} diff --git a/radar-commons/src/test/java/org/radarbase/util/Base64Test.java b/radar-commons/src/test/java/org/radarbase/util/Base64Test.java deleted file mode 100644 index a22f6bf9..00000000 --- a/radar-commons/src/test/java/org/radarbase/util/Base64Test.java +++ /dev/null @@ -1,25 +0,0 @@ -package org.radarbase.util; - -import static org.junit.Assert.*; - -import java.util.concurrent.ThreadLocalRandom; -import kotlin.text.Charsets; -import org.junit.Test; -import org.radarbase.util.Base64.Encoder; - -public class Base64Test { - @Test - public void encoderTest() { - Encoder encoder = Base64.getEncoder(); - java.util.Base64.Encoder javaEncoder = java.util.Base64.getEncoder(); - - ThreadLocalRandom random = ThreadLocalRandom.current(); - for (int i = 0; i < 2_000; i += 7) { - byte[] src = new byte[i]; - random.nextBytes(src); - String actual = encoder.encode(src); - String expected = new String(javaEncoder.encode(src), Charsets.UTF_8); - assertEquals(expected, actual); - } - } -} diff --git a/radar-commons/src/test/java/org/radarbase/util/Base64Test.kt b/radar-commons/src/test/java/org/radarbase/util/Base64Test.kt new file mode 100644 index 00000000..66fd0bbc --- /dev/null +++ b/radar-commons/src/test/java/org/radarbase/util/Base64Test.kt @@ -0,0 +1,25 @@ +package org.radarbase.util + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.* +import java.util.concurrent.ThreadLocalRandom +import kotlin.text.Charsets.UTF_8 + +class Base64Test { + @Test + fun encoderTest() { + val javaEncoder = Base64.getEncoder() + val random = ThreadLocalRandom.current() + var i = 0 + while (i < 2000) { + val src = ByteArray(i) + random.nextBytes(src) + assertEquals( + Base64Encoder.encode(src), + String(javaEncoder.encode(src), UTF_8), + ) + i += 7 + } + } +} diff --git a/radar-commons/src/test/java/org/radarbase/util/SerializationTest.java 
b/radar-commons/src/test/java/org/radarbase/util/SerializationTest.java deleted file mode 100644 index d8081002..00000000 --- a/radar-commons/src/test/java/org/radarbase/util/SerializationTest.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright 2017 The Hyve and King's College London - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.radarbase.util; - -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; - -import java.util.Random; -import org.junit.Test; - -/** - * Created by joris on 22/02/2017. 
- */ -public class SerializationTest { - @Test - public void bytesToLong() throws Exception { - byte[] input = {0, 0, 0, 0, 0, 0, 0, 1}; - assertEquals(1L, Serialization.bytesToLong(input, 0)); - } - - @Test - public void longToBytes() throws Exception { - byte[] buffer = new byte[8]; - Serialization.longToBytes(1L, buffer, 0); - assertArrayEquals(new byte[] {0, 0, 0, 0, 0, 0, 0, 1}, buffer); - } - - @Test - public void longToBytesAndBack() throws Exception { - byte[] buffer = new byte[8]; - Random random = new Random(); - for (int i = 0; i < 10; i++) { - long value = random.nextLong(); - Serialization.longToBytes(value, buffer, 0); - assertEquals(value, Serialization.bytesToLong(buffer, 0)); - } - } - - @Test - public void longToBytesAndOffset() throws Exception { - Random random = new Random(); - byte[] buffer = new byte[8 + 256]; - random.nextBytes(buffer); - - for (int i = 0; i < 10; i++) { - int offset = random.nextInt(256); - long value = random.nextLong(); - Serialization.longToBytes(value, buffer, offset); - assertEquals(value, Serialization.bytesToLong(buffer, offset)); - } - } - - @Test - public void intToBytes() throws Exception { - byte[] buffer = new byte[4]; - Serialization.intToBytes(1, buffer, 0); - assertArrayEquals(new byte[] {0, 0, 0, 1}, buffer); - } - - @Test - public void bytesToInt() throws Exception { - byte[] input = {0, 0, 0, 1}; - assertEquals(1, Serialization.bytesToInt(input, 0)); - } - - @Test - public void intToBytesAndOffset() throws Exception { - Random random = new Random(); - byte[] buffer = new byte[4 + 256]; - random.nextBytes(buffer); - - for (int i = 0; i < 10; i++) { - int offset = random.nextInt(256); - int value = random.nextInt(); - Serialization.intToBytes(value, buffer, offset); - assertEquals(value, Serialization.bytesToInt(buffer, offset)); - } - } -} \ No newline at end of file diff --git a/settings.gradle b/settings.gradle.kts similarity index 73% rename from settings.gradle rename to settings.gradle.kts index 
2c61dca1..5ca62c8a 100644 --- a/settings.gradle +++ b/settings.gradle.kts @@ -13,7 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +rootProject.name = "radar-commons" -include ':radar-commons' -include ':radar-commons-testing' -include ':radar-commons-server' +include(":radar-commons") +include(":radar-commons-server") +include(":radar-commons-kotlin") +include(":radar-commons-testing") + +pluginManagement { + includeBuild("radar-commons-gradle") +}