diff --git a/.idea/compiler.xml b/.idea/compiler.xml
deleted file mode 100644
index 69eaff7a..00000000
--- a/.idea/compiler.xml
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
deleted file mode 100644
index ad7f0f8e..00000000
--- a/.idea/modules.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 00db462a..12ce0e67 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,18 +2,22 @@ language: java
jdk:
- oraclejdk8
sudo: required
+
services:
- docker
+
env:
- DOCKER_COMPOSE_VERSION: 1.9.0
+ DOCKER_COMPOSE_VERSION: 1.16.1
TERM: dumb
-before_cache:
- - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- - rm -fr $HOME/.gradle/caches/*/plugin-resolution/
+
cache:
directories:
- - $HOME/.gradle/caches/
- - $HOME/.gradle/wrapper/
+ - $HOME/.gradle/caches/jars-1
+ - $HOME/.gradle/caches/jars-2
+ - $HOME/.gradle/caches/jars-3
+ - $HOME/.gradle/caches/modules-2/files-2.1/
+ - $HOME/.gradle/native
+ - $HOME/.gradle/wrapper
before_install:
- mkdir -p "$HOME/bin";
@@ -31,7 +35,10 @@ deploy:
provider: releases
api_key: ${GH_TOKEN}
file_glob: true
- file: "build/libs/*.jar"
+ file:
+ - build/libs/*.jar
+ - build/distributions/*.zip
+ - build/distributions/*.tar.gz
skip_cleanup: true
on:
tags: true
diff --git a/Dockerfile b/Dockerfile
index f402cabd..68192f2d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM confluentinc/cp-base:3.2.1-5
+FROM confluentinc/cp-base:3.3.0
ENV TERM=dumb
diff --git a/README.md b/README.md
index 00a67355..fb4bbed9 100644
--- a/README.md
+++ b/README.md
@@ -135,8 +135,8 @@ To get email notifications for Empatica E4 battery status, an email server witho
data:
- topic: topic1
file: topic1.csv
- key_schema: org.radarcns.key.MeasurementKey
- value_schema: org.radarcns.empatica.EmpaticaE4Acceleration
+ key_schema: org.radarcns.kafka.ObservationKey
+ value_schema: org.radarcns.passive.empatica.EmpaticaE4Acceleration
```
Each value has a topic to send the data to, a file containing the data, a schema class for the key and a schema class for the value. Also create a CSV file for each of these entries:
@@ -177,7 +177,7 @@ RADAR-Stream is a layer on top of Kafka streams. Topics are processed by streams
KafkaStreams currently communicates using master-slave model. The [MasterAggregator][1] defines the stream-master, while [AggregatorWorker][2] represents the stream-slave. The master-stream creates, starts and stops a list of stream-slaves registered with the corresponding master.
While the classical Kafka Consumer requires two implementations to support standalone and group executions, the AggregatorWorker provides both behaviors with one implementation.
-To extend the RADAR-Stream API, follow these steps (see the `org.radarcns.empatica` package as an example):
+To extend the RADAR-Stream API, follow these steps (see the `org.radarcns.passive.empatica` package as an example):
- Create a stream group by overriding [GeneralStreamGroup][8]. Use its `createSensorStream` and `createStream` methods to create the stream definitions.
- For each topic, create a [AggregatorWorker][2].
diff --git a/build.gradle b/build.gradle
index 211603d4..fadb6124 100644
--- a/build.gradle
+++ b/build.gradle
@@ -17,7 +17,7 @@ plugins {
//---------------------------------------------------------------------------//
group = 'org.radarcns'
-version = '0.1'
+version = '0.1.1'
ext.description = 'Kafka backend for processing device data.'
mainClassName = 'org.radarcns.RadarBackend'
@@ -30,9 +30,9 @@ sourceCompatibility = '1.8'
ext.boundaryVersion = '1.0.6'
ext.codacyVersion = '1.0.10'
-ext.confluentVersion = '3.2.1'
+ext.confluentVersion = '3.3.0'
ext.hamcrestVersion = '1.3'
-ext.kafkaVersion = '0.10.2.1'
+ext.kafkaVersion = '0.11.0.1'
ext.jacksonVersion='2.8.5'
ext.javaMailVersion = '1.5.6'
ext.junitVersion = '4.12'
@@ -40,11 +40,12 @@ ext.mathVersion = '3.0'
ext.findbugVersion = '3.0.1'
ext.commonsCliVersion = '1.2'
ext.mockitoVersion = '2.2.29'
-ext.radarCommonsVersion = '0.5'
-ext.radarSchemasVersion = '0.2-alpha.1'
+ext.radarCommonsVersion = '0.6.3'
+ext.radarSchemasVersion = '0.2.2'
ext.subethamailVersion = '3.1.7'
ext.jsoupVersion = '1.10.2'
-ext.slf4jVersion = '1.7.21'
+ext.slf4jVersion = '1.7.25'
+ext.log4jVersion = '1.2.17'
ext.avroVersion = '1.8.2'
ext.githubUrl = 'https://github.com/' + githubRepoName + '.git'
@@ -81,9 +82,6 @@ configurations.all {
// In this section you declare where to find the dependencies of your project
repositories {
- // For working with dev-branch commons
- maven { url "${rootProject.projectDir}/libs" }
-
// Use 'jcenter' for resolving your dependencies.
// You can declare any Maven/Ivy/file repository here.
jcenter()
@@ -100,15 +98,16 @@ repositories {
dependencies {
compile group: 'org.radarcns', name: 'radar-commons', version: radarCommonsVersion
compile group: 'org.apache.avro', name: 'avro', version: avroVersion
- compile (group: 'org.radarcns', name: 'radar-commons-testing', version: radarCommonsVersion) {
- exclude group: 'org.slf4j', module: 'slf4j-simple'
- }
+ compile group: 'org.radarcns', name: 'radar-commons-testing', version: radarCommonsVersion
compile group: 'org.radarcns', name: 'radar-schemas-commons', version: radarSchemasVersion
// Kafka streaming API
compile group: 'org.apache.kafka', name: 'kafka-streams', version: kafkaVersion
- compile (group: 'io.confluent', name: 'kafka-avro-serializer', version: confluentVersion) {
+ compile (group: 'io.confluent', name: 'kafka-streams-avro-serde', version: confluentVersion) {
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
+ exclude group: 'log4j', module: 'log4j'
+ exclude group: 'jline', module: 'jline'
+ exclude group: 'io.netty', module: 'netty'
}
// Nonnull annotation
@@ -135,8 +134,12 @@ dependencies {
// Mock mail server
testCompile group: 'org.subethamail', name: 'subethasmtp', version: subethamailVersion
- testImplementation group: 'org.slf4j', name: 'slf4j-simple', version: slf4jVersion
- integrationTestImplementation group: 'org.slf4j', name: 'slf4j-simple', version: slf4jVersion
+ runtimeOnly group: 'log4j', name: 'log4j', version: log4jVersion
+ runtimeOnly group: 'org.slf4j', name: 'slf4j-log4j12', version: slf4jVersion
+ testImplementation group: 'log4j', name: 'log4j', version: log4jVersion
+ testImplementation group: 'org.slf4j', name: 'slf4j-log4j12', version: slf4jVersion
+ integrationTestImplementation group: 'log4j', name: 'log4j', version: log4jVersion
+ integrationTestImplementation group: 'org.slf4j', name: 'slf4j-log4j12', version: slf4jVersion
// For Topic name validation based on Kafka classes
testImplementation (group: 'org.apache.kafka', name: 'kafka_2.11', version: kafkaVersion) {
@@ -300,6 +303,10 @@ ext.sharedManifest = manifest {
// Packaging //
//---------------------------------------------------------------------------//
+processResources {
+ expand(version: version)
+}
+
jar {
manifest {
from sharedManifest
@@ -309,7 +316,11 @@ jar {
shadowJar {
mergeServiceFiles()
- exclude 'log4j.properties'
+}
+
+tasks.withType(Tar){
+ compression = Compression.GZIP
+ extension = 'tar.gz'
}
// custom tasks for creating source/javadoc jars
@@ -345,6 +356,11 @@ publishing {
root.appendNode('description', description)
root.appendNode('name', rootProject.name)
root.appendNode('url', githubUrl)
+ root.dependencies.'*'.findAll() {
+ it.artifactId.text() in ['log4j', 'slf4j-log4j12']
+ }.each() {
+ it.parent().remove(it)
+ }
root.children().last() + pomConfig
}
}
@@ -380,7 +396,7 @@ bintray {
task wrapper(type: Wrapper) {
gradleVersion = '4.1'
- distributionUrl distributionUrl.replace("bin", "all")
+ distributionType 'all'
}
run {
diff --git a/docker-compose.yml b/docker-compose.yml
index 770e60f9..70b4f232 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -6,7 +6,7 @@ services:
# Zookeeper Cluster #
#---------------------------------------------------------------------------#
zookeeper-1:
- image: confluentinc/cp-zookeeper:3.2.1
+ image: confluentinc/cp-zookeeper:3.3.0
environment:
ZOOKEEPER_SERVER_ID: 1
ZOOKEEPER_CLIENT_PORT: 2181
@@ -19,7 +19,7 @@ services:
# Kafka Cluster #
#---------------------------------------------------------------------------#
kafka-1:
- image: confluentinc/cp-kafka:3.2.1
+ image: confluentinc/cp-kafka:3.3.0
depends_on:
- zookeeper-1
environment:
@@ -28,12 +28,19 @@ services:
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-1:9092
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
KAFKA_GROUP_MIN_SESSION_TIMEOUT_MS: 5000
+ KAFKA_INTER_BROKER_PROTOCOL_VERSION: 0.11.0
+ KAFKA_LOG_MESSAGE_FORMAT_VERSION: 0.11.0
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+
#---------------------------------------------------------------------------#
# Schema Registry #
#---------------------------------------------------------------------------#
schema-registry-1:
- image: confluentinc/cp-schema-registry:3.2.1
+ image: confluentinc/cp-schema-registry:3.3.0
depends_on:
- zookeeper-1
- kafka-1
@@ -50,7 +57,7 @@ services:
# REST proxy #
#---------------------------------------------------------------------------#
rest-proxy-1:
- image: confluentinc/cp-kafka-rest:3.2.1
+ image: confluentinc/cp-kafka-rest:3.3.0
depends_on:
- kafka-1
- schema-registry-1
@@ -74,3 +81,6 @@ services:
- schema-registry-1
command:
- integrationTest
+ volumes:
+ - ./build/jacoco:/code/build/jacoco
+ - ./build/reports:/code/build/reports
diff --git a/radar.yml b/radar.yml
index 293910e9..50ea150f 100644
--- a/radar.yml
+++ b/radar.yml
@@ -19,8 +19,8 @@ broker:
#Kafka internal parameters
stream_properties:
- auto_commit_interval_ms: 1000
- session_timeout_ms: 10000
+ auto.commit.interval.ms: 1000
+ session.timeout.ms: 10000
#============================ Kafka Streams ============================#
#The number of threads that a stream must be run according is priority
diff --git a/src/integrationTest/java/org/radarcns/integration/DirectProducerTest.java b/src/integrationTest/java/org/radarcns/integration/DirectProducerTest.java
deleted file mode 100644
index d56744ba..00000000
--- a/src/integrationTest/java/org/radarcns/integration/DirectProducerTest.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright 2017 King's College London and The Hyve
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.radarcns.integration;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-
-import org.apache.commons.cli.ParseException;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.radarcns.RadarBackend;
-import org.radarcns.config.RadarBackendOptions;
-import org.radarcns.config.RadarPropertyHandler;
-import org.radarcns.config.YamlConfigLoader;
-import org.radarcns.mock.MockProducer;
-import org.radarcns.mock.config.BasicMockConfig;
-import org.radarcns.stream.empatica.E4StreamMaster;
-import org.radarcns.stream.phone.PhoneStreamMaster;
-import org.radarcns.util.RadarSingletonFactory;
-
-public class DirectProducerTest {
- @Rule
- public ExpectedException exception = ExpectedException.none();
- private RadarBackend backend;
-
- @Before
- public void setUp() throws IOException, ParseException, InterruptedException {
- String propertiesPath = "src/integrationTest/resources/org/radarcns/kafka/radar.yml";
- RadarPropertyHandler propHandler = RadarSingletonFactory.getRadarPropertyHandler();
- if (!propHandler.isLoaded()) {
- propHandler.load(propertiesPath);
- }
-
- String[] args = {"-c", propertiesPath, "stream"};
-
- RadarBackendOptions opts = RadarBackendOptions.parse(args);
- propHandler.getRadarProperties().setStreamMasters(Arrays.asList(
- E4StreamMaster.class.getName(),
- PhoneStreamMaster.class.getName()
- ));
- backend = new RadarBackend(opts, propHandler);
- backend.start();
- }
-
- @After
- public void tearDown() throws IOException, InterruptedException {
- backend.shutdown();
- }
-
- @Test(timeout = 300_000L)
- public void testDirect() throws Exception {
- File file = new File(getClass().getResource("/mock_devices.yml").getFile());
- BasicMockConfig mockConfig = new YamlConfigLoader().load(file, BasicMockConfig.class);
-
- MockProducer mockProducer = new MockProducer(mockConfig);
- mockProducer.start();
- Thread.sleep(mockConfig.getDuration());
- mockProducer.shutdown();
-
- String clientId = "someclinet";
- E4AggregatedAccelerationMonitor monitor = new E4AggregatedAccelerationMonitor(
- RadarSingletonFactory.getRadarPropertyHandler(),
- "android_empatica_e4_acceleration_output", clientId);
- monitor.setPollTimeout(280_000L);
- monitor.start();
- }
-}
diff --git a/src/integrationTest/java/org/radarcns/integration/E4AggregatedAccelerationMonitor.java b/src/integrationTest/java/org/radarcns/integration/E4AggregatedAccelerationMonitor.java
index 0237b7f5..6eb9671d 100644
--- a/src/integrationTest/java/org/radarcns/integration/E4AggregatedAccelerationMonitor.java
+++ b/src/integrationTest/java/org/radarcns/integration/E4AggregatedAccelerationMonitor.java
@@ -16,15 +16,6 @@
package org.radarcns.integration;
-import static org.apache.kafka.clients.consumer.ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG;
-import static org.apache.kafka.clients.consumer.ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.Properties;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
@@ -32,13 +23,17 @@
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.radarcns.config.RadarPropertyHandler;
-import org.radarcns.config.RadarPropertyHandlerImpl;
-import org.radarcns.key.MeasurementKey;
import org.radarcns.monitor.AbstractKafkaMonitor;
-import org.radarcns.util.RadarSingletonFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Properties;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
/**
* Consumer for Aggregated Acceleration Stream
*/
@@ -61,10 +56,6 @@ protected void evaluateRecord(ConsumerRecord recor
@Override
protected void evaluateRecords(ConsumerRecords records) {
- if (records.isEmpty()) {
- shutdown();
- return;
- }
assertTrue(records.count() > 0);
for (ConsumerRecord record : records) {
@@ -73,36 +64,24 @@ protected void evaluateRecords(ConsumerRecords rec
logger.error("Failed to process record {} without a key.", record);
return;
}
- MeasurementKey measurementKey;
Schema keySchema = key.getSchema();
if (keySchema.getField("userId") != null
&& keySchema.getField("sourceId") != null) {
- measurementKey = new MeasurementKey(key.get("userId").toString(),
- key.get("sourceId").toString());
- assertNotNull(measurementKey);
+ assertNotNull(key.get("userId"));
+ assertNotNull(key.get("sourceId"));
} else {
logger.error("Failed to process record {} with wrong key type {}.",
record, key.getSchema());
return;
}
GenericRecord value = record.value();
- Schema recordSchema = value.getSchema();
+ GenericData.Array count = (GenericData.Array) value.get("count");
+ logger.info("Received [{}, {}, {}] E4 messages",
+ count.get(0), count.get(1), count.get(2));
- int minFieldId = recordSchema.getField("min").pos();
-
- GenericData.Array min = (GenericData.Array) value.get(minFieldId);
- assertNotNull(min);
- assertEquals(15.0d, (double)min.get(0), 0.0);
- assertEquals(-15.0d, (double)min.get(1), 0.0);
- assertEquals(64.0d, (double)min.get(2), 0.0);
-
- int maxFieldId = recordSchema.getField("max").pos();
-
- GenericData.Array max = (GenericData.Array) value.get(maxFieldId);
- assertNotNull(max);
- assertEquals(15.0d, (double)max.get(0), 0.0);
- assertEquals(Double.MIN_VALUE, (double)max.get(1), 0.0d);
- assertEquals(64.0d, (double)max.get(2), 0.0);
+ if ((Double)count.get(0) > 200) {
+ shutdown();
+ }
}
}
}
diff --git a/src/integrationTest/java/org/radarcns/integration/PhoneStreamTest.java b/src/integrationTest/java/org/radarcns/integration/PhoneStreamTest.java
index cd958d67..aa3d8ab5 100644
--- a/src/integrationTest/java/org/radarcns/integration/PhoneStreamTest.java
+++ b/src/integrationTest/java/org/radarcns/integration/PhoneStreamTest.java
@@ -16,25 +16,6 @@
package org.radarcns.integration;
-import static org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG;
-import static org.apache.kafka.clients.consumer.ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG;
-import static org.apache.kafka.clients.consumer.ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG;
-import static org.apache.kafka.clients.producer.ProducerConfig.BOOTSTRAP_SERVERS_CONFIG;
-import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
-import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.radarcns.util.serde.AbstractKafkaAvroSerde.SCHEMA_REGISTRY_CONFIG;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-import java.util.UUID;
-
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.specific.SpecificRecord;
import org.apache.commons.cli.ParseException;
@@ -49,13 +30,17 @@
import org.radarcns.config.ConfigRadar;
import org.radarcns.config.RadarBackendOptions;
import org.radarcns.config.RadarPropertyHandler;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.config.YamlConfigLoader;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.mock.MockProducer;
+import org.radarcns.mock.config.BasicMockConfig;
import org.radarcns.monitor.AbstractKafkaMonitor;
import org.radarcns.monitor.KafkaMonitor;
-import org.radarcns.phone.PhoneUsageEvent;
-import org.radarcns.phone.UsageEventType;
+import org.radarcns.passive.phone.PhoneUsageEvent;
+import org.radarcns.passive.phone.UsageEventType;
import org.radarcns.producer.KafkaTopicSender;
import org.radarcns.producer.direct.DirectSender;
+import org.radarcns.stream.empatica.E4StreamMaster;
import org.radarcns.stream.phone.PhoneStreamMaster;
import org.radarcns.topic.AvroTopic;
import org.radarcns.util.RadarSingletonFactory;
@@ -63,6 +48,27 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG;
+import static org.apache.kafka.clients.consumer.ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG;
+import static org.apache.kafka.clients.consumer.ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG;
+import static org.apache.kafka.clients.producer.ProducerConfig.BOOTSTRAP_SERVERS_CONFIG;
+import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
+import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.radarcns.util.serde.AbstractKafkaAvroSerde.SCHEMA_REGISTRY_CONFIG;
+
public class PhoneStreamTest {
private static final Logger logger = LoggerFactory.getLogger(PhoneStreamTest.class);
private static final Map CATEGORIES = new HashMap<>();
@@ -99,15 +105,6 @@ public void setUp() throws IOException, ParseException, InterruptedException {
String[] args = {"-c", propertiesPath, "stream"};
RadarBackendOptions opts = RadarBackendOptions.parse(args);
- propHandler.getRadarProperties().setStreamMasters(
- Collections.singletonList(PhoneStreamMaster.class.getName()));
-
- Map streamProps = new HashMap<>();
- streamProps.put(AUTO_COMMIT_INTERVAL_MS_CONFIG, String.valueOf(1_000));
- streamProps.put(SESSION_TIMEOUT_MS_CONFIG, String.valueOf(5_000));
- streamProps.put(HEARTBEAT_INTERVAL_MS_CONFIG, String.valueOf(1_000));
-
- propHandler.getRadarProperties().setStreamProperties(streamProps);
backend = new RadarBackend(opts, propHandler);
backend.start();
}
@@ -127,32 +124,59 @@ public void testDirect() throws Exception {
properties.put(SCHEMA_REGISTRY_CONFIG, config.getSchemaRegistry().get(0));
properties.put(BOOTSTRAP_SERVERS_CONFIG, config.getBrokerPaths());
- DirectSender sender = new DirectSender<>(properties);
- AvroTopic topic = new AvroTopic<>(
- "android_phone_usage_event",
- MeasurementKey.getClassSchema(), PhoneUsageEvent.getClassSchema(),
- MeasurementKey.class, PhoneUsageEvent.class);
+ DirectSender sender = new DirectSender<>(properties);
long offset = 0;
double time = System.currentTimeMillis() / 1000d - 10d;
- MeasurementKey key = new MeasurementKey("a", "c");
- try (KafkaTopicSender topicSender = sender.sender(topic)) {
- topicSender.send(offset++, key, new PhoneUsageEvent(time, time++, "com.whatsapp", null, null, UsageEventType.FOREGROUND));
- topicSender.send(offset++, key, new PhoneUsageEvent(time, time++, "com.whatsapp", null, null, UsageEventType.BACKGROUND));
- topicSender.send(offset++, key, new PhoneUsageEvent(time, time++, "nl.thehyve.transmartclient", null, null, UsageEventType.FOREGROUND));
- topicSender.send(offset++, key, new PhoneUsageEvent(time, time++, "nl.thehyve.transmartclient", null, null, UsageEventType.BACKGROUND));
- topicSender.send(offset++, key, new PhoneUsageEvent(time, time++, "com.strava", null, null, UsageEventType.FOREGROUND));
- topicSender.send(offset++, key, new PhoneUsageEvent(time, time++, "com.strava", null, null, UsageEventType.BACKGROUND));
- topicSender.send(offset++, key, new PhoneUsageEvent(time, time++, "com.android.systemui", null, null, UsageEventType.FOREGROUND));
- topicSender.send(offset++, key, new PhoneUsageEvent(time, time, "com.android.systemui", null, null, UsageEventType.BACKGROUND));
+ ObservationKey key = new ObservationKey("test", "a", "c");
+
+ List events = Arrays.asList(
+ new PhoneUsageEvent(time, time++, "com.whatsapp", null, null, UsageEventType.FOREGROUND),
+ new PhoneUsageEvent(time, time++, "com.whatsapp", null, null, UsageEventType.BACKGROUND),
+ new PhoneUsageEvent(time, time++, "nl.thehyve.transmartclient", null, null, UsageEventType.FOREGROUND),
+ new PhoneUsageEvent(time, time++, "nl.thehyve.transmartclient", null, null, UsageEventType.BACKGROUND),
+ new PhoneUsageEvent(time, time++, "com.strava", null, null, UsageEventType.FOREGROUND),
+ new PhoneUsageEvent(time, time++, "com.strava", null, null, UsageEventType.BACKGROUND),
+ new PhoneUsageEvent(time, time++, "com.android.systemui", null, null, UsageEventType.FOREGROUND),
+ new PhoneUsageEvent(time, time, "com.android.systemui", null, null, UsageEventType.BACKGROUND));
+
+ AvroTopic topic = new AvroTopic<>(
+ "android_phone_usage_event",
+ ObservationKey.getClassSchema(), PhoneUsageEvent.getClassSchema(),
+ ObservationKey.class, PhoneUsageEvent.class);
+
+ try (KafkaTopicSender topicSender = sender.sender(topic)) {
+ for (PhoneUsageEvent event : events) {
+ topicSender.send(offset++, key, event);
+ }
}
+
sender.close();
+
+ File file = new File(getClass().getResource("/mock_devices.yml").getFile());
+ BasicMockConfig mockConfig = new YamlConfigLoader().load(file, BasicMockConfig.class);
+
+ MockProducer mockProducer = new MockProducer(mockConfig);
+ mockProducer.start();
+ Thread.sleep(mockConfig.getDuration());
+ mockProducer.shutdown();
+
consumePhone(offset);
consumeAggregated(offset / 2);
+ consumeE4();
+ }
+
+ private void consumeE4() throws IOException {
+ String clientId = "consumeE4";
+ E4AggregatedAccelerationMonitor monitor = new E4AggregatedAccelerationMonitor(
+ RadarSingletonFactory.getRadarPropertyHandler(),
+ "android_empatica_e4_acceleration_10sec", clientId);
+ monitor.setPollTimeout(280_000L);
+ monitor.start();
}
private void consumePhone(final long numRecordsExpected) throws IOException, InterruptedException {
- String clientId = "someclinet";
+ String clientId = "consumePhone";
KafkaMonitor monitor = new PhoneOutputMonitor(RadarSingletonFactory.getRadarPropertyHandler(), clientId, numRecordsExpected);
monitor.setPollTimeout(280_000L);
@@ -160,7 +184,7 @@ private void consumePhone(final long numRecordsExpected) throws IOException, Int
}
private void consumeAggregated(final long numRecordsExpected) throws IOException, InterruptedException {
- String clientId = "someclient";
+ String clientId = "consumeAggregated";
KafkaMonitor monitor = new PhoneAggregateMonitor(RadarSingletonFactory.getRadarPropertyHandler(), clientId, numRecordsExpected);
monitor.setPollTimeout(280_000L);
@@ -173,7 +197,7 @@ private static class PhoneOutputMonitor extends AbstractKafkaMonitor streamProperties = new HashMap<>();
+ @JsonProperty("build_version")
+ private String buildVersion;
+
public Date getReleased() {
return released;
}
@@ -243,4 +246,12 @@ public void setExtras(Map extras) {
public String toString() {
return new YamlConfigLoader().prettyString(this);
}
+
+ public String getBuildVersion() {
+ return buildVersion;
+ }
+
+ public void setBuildVersion(String buildVersion) {
+ this.buildVersion = buildVersion;
+ }
}
diff --git a/src/main/java/org/radarcns/config/KafkaProperty.java b/src/main/java/org/radarcns/config/KafkaProperty.java
index 21c378d9..ceacd323 100644
--- a/src/main/java/org/radarcns/config/KafkaProperty.java
+++ b/src/main/java/org/radarcns/config/KafkaProperty.java
@@ -19,17 +19,18 @@
import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
import java.util.Properties;
import javax.annotation.Nonnull;
+
+import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.processor.TimestampExtractor;
-import org.radarcns.util.serde.SpecificAvroSerde;
// TODO this class should substitute org.radarcns.util.RadarConfig
public class KafkaProperty {
private final ConfigRadar configRadar;
- protected KafkaProperty(ConfigRadar configRadar) {
+ public KafkaProperty(ConfigRadar configRadar) {
this.configRadar = configRadar;
}
@@ -38,15 +39,15 @@ protected KafkaProperty(ConfigRadar configRadar) {
* @param numThread number of threads to execute stream processing
* @return Properties for a Kafka Stream
*/
- public Properties getStream(@Nonnull String clientId, @Nonnull int numThread) {
+ public Properties getStreamProperties(@Nonnull String clientId, int numThread) {
Properties props = new Properties();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, clientId);
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, configRadar.getBrokerPaths());
props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
configRadar.getSchemaRegistryPaths());
- props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
- props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
+ props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
+ props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, SpecificAvroSerde.class);
props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, numThread);
props.putAll(configRadar.getStreamProperties());
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
@@ -60,11 +61,12 @@ public Properties getStream(@Nonnull String clientId, @Nonnull int numThread) {
* @param timestampExtractor custom timestamp extract that overrides the out-of-the-box
* @return Properties for a Kafka Stream
*/
- public Properties getStream(@Nonnull String clientId, @Nonnull int numThread,
+ public Properties getStreamProperties(@Nonnull String clientId, int numThread,
@Nonnull Class extends TimestampExtractor> timestampExtractor) {
- Properties props = getStream(clientId, numThread);
+ Properties props = getStreamProperties(clientId, numThread);
- props.put(StreamsConfig.TIMESTAMP_EXTRACTOR_CLASS_CONFIG, timestampExtractor.getName());
+ props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG,
+ timestampExtractor.getName());
return props;
}
diff --git a/src/main/java/org/radarcns/config/RadarPropertyHandlerImpl.java b/src/main/java/org/radarcns/config/RadarPropertyHandlerImpl.java
index 630b51f0..d6ad364a 100644
--- a/src/main/java/org/radarcns/config/RadarPropertyHandlerImpl.java
+++ b/src/main/java/org/radarcns/config/RadarPropertyHandlerImpl.java
@@ -23,8 +23,10 @@
import java.io.File;
import java.io.IOException;
+import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
+import java.util.Properties;
import static org.radarcns.util.Strings.isNullOrEmpty;
@@ -76,6 +78,17 @@ public void load(String pathFile) throws IOException {
}
properties = new YamlConfigLoader().load(file, ConfigRadar.class);
+
+ Properties buildProperties = new Properties();
+ try (InputStream in = getClass().getResourceAsStream("/build.properties")) {
+ if (in != null) {
+ buildProperties.load(in);
+ }
+ }
+ String version = buildProperties.getProperty("version");
+ if (version != null) {
+ properties.setBuildVersion(version);
+ }
}
private File getDefaultFile() throws IOException {
diff --git a/src/main/java/org/radarcns/monitor/AbstractKafkaMonitor.java b/src/main/java/org/radarcns/monitor/AbstractKafkaMonitor.java
index 798263e6..420c62f1 100644
--- a/src/main/java/org/radarcns/monitor/AbstractKafkaMonitor.java
+++ b/src/main/java/org/radarcns/monitor/AbstractKafkaMonitor.java
@@ -32,7 +32,7 @@
import org.apache.kafka.common.errors.WakeupException;
import org.radarcns.config.ConfigRadar;
import org.radarcns.config.RadarPropertyHandler;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
import org.radarcns.util.PersistentStateStore;
import org.radarcns.util.RollingTimeAverage;
import org.slf4j.Logger;
@@ -165,8 +165,11 @@ public void start() {
evaluateRecords(records);
} catch (SerializationException ex) {
handleSerializationException();
- } catch (InterruptException | WakeupException ex) {
+ } catch (WakeupException ex) {
logger.info("Consumer woke up");
+ } catch (InterruptException ex) {
+ logger.info("Consumer was interrupted");
+ shutdown();
} catch (KafkaException ex) {
logger.error("Kafka consumer gave exception", ex);
}
@@ -236,6 +239,7 @@ public synchronized boolean isShutdown() {
@Override
public synchronized void shutdown() {
+ logger.info("Shutting down monitor {}", getClass().getSimpleName());
this.done = true;
this.consumer.wakeup();
}
@@ -248,12 +252,17 @@ public void setPollTimeout(long pollTimeout) {
this.pollTimeout.set(pollTimeout);
}
- protected MeasurementKey extractKey(ConsumerRecord record) {
+ protected ObservationKey extractKey(ConsumerRecord record) {
GenericRecord key = record.key();
if (key == null) {
throw new IllegalArgumentException("Failed to process record without a key.");
}
Schema keySchema = key.getSchema();
+ Field projectIdField = keySchema.getField("projectId");
+ if (projectIdField == null) {
+ throw new IllegalArgumentException("Failed to process record with key type "
+ + key.getSchema() + " without project ID.");
+ }
Field userIdField = keySchema.getField("userId");
if (userIdField == null) {
throw new IllegalArgumentException("Failed to process record with key type "
@@ -264,7 +273,8 @@ protected MeasurementKey extractKey(ConsumerRecord record) {
throw new IllegalArgumentException("Failed to process record with key type "
+ key.getSchema() + " without source ID.");
}
- return new MeasurementKey(
+ return new ObservationKey(
+ key.get(projectIdField.pos()).toString(),
key.get(userIdField.pos()).toString(),
key.get(sourceIdField.pos()).toString());
}
diff --git a/src/main/java/org/radarcns/monitor/BatteryLevelMonitor.java b/src/main/java/org/radarcns/monitor/BatteryLevelMonitor.java
index f8474dd4..a17549fd 100644
--- a/src/main/java/org/radarcns/monitor/BatteryLevelMonitor.java
+++ b/src/main/java/org/radarcns/monitor/BatteryLevelMonitor.java
@@ -30,7 +30,7 @@
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.radarcns.config.RadarPropertyHandler;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
import org.radarcns.monitor.BatteryLevelMonitor.BatteryLevelState;
import org.radarcns.util.EmailSender;
import org.radarcns.util.RadarSingletonFactory;
@@ -74,7 +74,7 @@ public BatteryLevelMonitor(RadarPropertyHandler radar, Collection topics
@Override
protected void evaluateRecord(ConsumerRecord record) {
try {
- MeasurementKey key = extractKey(record);
+ ObservationKey key = extractKey(record);
float batteryLevel = extractBatteryLevel(record);
float previousLevel = state.updateLevel(key, batteryLevel);
@@ -109,7 +109,7 @@ protected void evaluateRecord(ConsumerRecord recor
}
}
- private void updateStatus(MeasurementKey key, Status status) {
+ private void updateStatus(ObservationKey key, Status status) {
if (sender == null) {
return;
}
@@ -186,7 +186,7 @@ public void setLevels(Map levels) {
}
/** Update a single battery level. */
- public float updateLevel(MeasurementKey key, float level) {
+ public float updateLevel(ObservationKey key, float level) {
Float previousLevel = levels.put(measurementKeyToString(key), level);
return previousLevel == null ? 1.0f : previousLevel;
}
diff --git a/src/main/java/org/radarcns/monitor/DisconnectMonitor.java b/src/main/java/org/radarcns/monitor/DisconnectMonitor.java
index a3544a9e..e58d9e5a 100644
--- a/src/main/java/org/radarcns/monitor/DisconnectMonitor.java
+++ b/src/main/java/org/radarcns/monitor/DisconnectMonitor.java
@@ -25,7 +25,7 @@
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.radarcns.config.DisconnectMonitorConfig;
import org.radarcns.config.RadarPropertyHandler;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
import org.radarcns.monitor.DisconnectMonitor.DisconnectMonitorState;
import org.radarcns.util.EmailSender;
import org.radarcns.util.Monitor;
@@ -141,7 +141,7 @@ protected void evaluateRecords(ConsumerRecords rec
@Override
protected void evaluateRecord(ConsumerRecord record) {
- MeasurementKey key = extractKey(record);
+ ObservationKey key = extractKey(record);
this.monitor.increment();
@@ -172,7 +172,7 @@ private void scheduleRepetition(final String key, final MissingRecordsReport rep
}
private void reportMissing(String keyString, MissingRecordsReport report) {
- MeasurementKey key = stringToKey(keyString);
+ ObservationKey key = stringToKey(keyString);
long timeout = report.getTimeout();
logger.info("Device {} timeout {} (message {} of {}). Reporting it missing.", key,
timeout, report.messageNumber, numRepetitions);
@@ -205,7 +205,7 @@ private void reportMissing(String keyString, MissingRecordsReport report) {
}
}
- private void reportRecovered(MeasurementKey key, long reportedMissingTime) {
+ private void reportRecovered(ObservationKey key, long reportedMissingTime) {
logger.info("Device {} seen again. Reporting it recovered.", key);
try {
Date reportedMissingDate = new Date(reportedMissingTime);
diff --git a/src/main/java/org/radarcns/stream/CombinedStreamMaster.java b/src/main/java/org/radarcns/stream/CombinedStreamMaster.java
index 5fde2586..55ca9685 100644
--- a/src/main/java/org/radarcns/stream/CombinedStreamMaster.java
+++ b/src/main/java/org/radarcns/stream/CombinedStreamMaster.java
@@ -72,7 +72,7 @@ public List getTopicNames() {
}
@Override
- public StreamDefinition getStreamDefinition(String inputTopic) {
+ public Collection getStreamDefinition(String inputTopic) {
for (StreamMaster master : streamMasters) {
if (master.getStreamGroup().getTopicNames().contains(inputTopic)) {
return master.getStreamGroup().getStreamDefinition(inputTopic);
diff --git a/src/main/java/org/radarcns/stream/GeneralStreamGroup.java b/src/main/java/org/radarcns/stream/GeneralStreamGroup.java
index dd95e111..744e64ff 100644
--- a/src/main/java/org/radarcns/stream/GeneralStreamGroup.java
+++ b/src/main/java/org/radarcns/stream/GeneralStreamGroup.java
@@ -16,13 +16,18 @@
package org.radarcns.stream;
+import org.radarcns.topic.KafkaTopic;
+
import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.radarcns.topic.KafkaTopic;
+import java.util.TreeSet;
+import java.util.stream.Collectors;
/**
* Implementation of a {@link StreamGroup}. Override to create specific streams for a given
@@ -35,12 +40,23 @@
public class GeneralStreamGroup implements StreamGroup {
public static final String OUTPUT_LABEL = "_output";
- private final Map topicMap;
+ private final Map> topicMap;
private final Set topicNames;
public GeneralStreamGroup() {
topicMap = new HashMap<>();
- topicNames = new HashSet<>();
+ topicNames = new TreeSet<>();
+ }
+
+ /**
+ * Create a stream from input to output topic. By using this method, {@link #getTopicNames()}
+ * and {@link #getStreamDefinition(String)} automatically get updated.
+ * @param input input topic name
+ * @param output output topic name
+ * @return stream definition.
+ */
+ protected Collection createStream(String input, String output) {
+ return createStream(input, output, 0L);
}
/**
@@ -48,10 +64,12 @@ public GeneralStreamGroup() {
* and {@link #getStreamDefinition(String)} automatically get updated.
* @param input input topic name
* @param output output topic name
+ * @param window time window size in milliseconds, 0 if none.
* @return stream definition.
*/
- protected StreamDefinition createStream(String input, String output) {
- StreamDefinition ret = new StreamDefinition(new KafkaTopic(input), new KafkaTopic(output));
+ protected Collection createStream(String input, String output, long window) {
+ Collection ret = Collections.singleton(
+ new StreamDefinition(new KafkaTopic(input), new KafkaTopic(output), window));
topicMap.put(input, ret);
topicNames.add(input);
topicNames.add(output);
@@ -64,13 +82,45 @@ protected StreamDefinition createStream(String input, String output) {
* @param input input topic name
* @return sensor stream definition
*/
- protected StreamDefinition createSensorStream(String input) {
- return createStream(input, input + OUTPUT_LABEL);
+ protected Collection createSensorStream(String input) {
+ return createStream(input, input + OUTPUT_LABEL, 0L);
+ }
+
+ protected Collection createAggregateStream(String input, long window) {
+ return createStream(input, input + OUTPUT_LABEL, window);
+ }
+
+ protected Collection createWindowedSensorStream(String input) {
+ return createWindowedSensorStream(input, input);
+ }
+
+ protected Collection createWindowedSensorStream(String input,
+ String outputBase) {
+
+ topicNames.add(input);
+ Collection streams = new TreeSet<>(
+ Arrays.stream(TimeWindowMetadata.values())
+ .map(w -> new StreamDefinition(new KafkaTopic(input),
+ new KafkaTopic(w.getTopicLabel(outputBase)),
+ w.getIntervalInMilliSec()))
+ .collect(Collectors.toList()));
+
+ topicNames.addAll(streams.stream()
+ .map(t -> t.getOutputTopic().getName())
+ .collect(Collectors.toList()));
+
+ topicMap.merge(input, streams, (v1, v2) -> {
+ Set newSet = new TreeSet<>(v1);
+ newSet.addAll(v2);
+ return newSet;
+ });
+
+ return streams;
}
@Override
- public StreamDefinition getStreamDefinition(String inputTopic) {
- StreamDefinition topic = topicMap.get(inputTopic);
+ public Collection getStreamDefinition(String inputTopic) {
+ Collection topic = topicMap.get(inputTopic);
if (topic == null) {
throw new IllegalArgumentException("Topic " + inputTopic + " unknown");
}
diff --git a/src/main/java/org/radarcns/stream/StreamDefinition.java b/src/main/java/org/radarcns/stream/StreamDefinition.java
index edc4227d..9b38c249 100644
--- a/src/main/java/org/radarcns/stream/StreamDefinition.java
+++ b/src/main/java/org/radarcns/stream/StreamDefinition.java
@@ -16,35 +16,64 @@
package org.radarcns.stream;
+import org.apache.kafka.streams.kstream.TimeWindows;
import org.radarcns.topic.KafkaTopic;
-public class StreamDefinition {
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import java.util.Objects;
- public static final String FROM_LABEL = "From-";
- public static final String TO_LABEL = "-To-";
+import static org.radarcns.util.Comparison.compare;
+public class StreamDefinition implements Comparable {
private final KafkaTopic inputTopic;
private final KafkaTopic outputTopic;
+ private final TimeWindows window;
+
+ /**
+ * Constructor. It takes in input the topic name to be consumed and the topic name where the
+ * related stream will write the computed values. Default 0 window is used.
+ * @param input source {@link KafkaTopic}
+ * @param output output {@link KafkaTopic}
+ */
+ public StreamDefinition(@Nonnull KafkaTopic input, @Nonnull KafkaTopic output) {
+ this(input, output, 0L);
+ }
/**
* Constructor. It takes in input the topic name to be consumed and to topic name where the
* related stream will write the computed values.
- *
* @param input source {@link KafkaTopic}
* @param output output {@link KafkaTopic}
+ * @param window time window for aggregation.
*/
- public StreamDefinition(KafkaTopic input, KafkaTopic output) {
- if (input == null || output == null) {
- throw new IllegalArgumentException("Input and output topic may not be null");
- }
+ public StreamDefinition(@Nonnull KafkaTopic input, @Nonnull KafkaTopic output, long window) {
+ this(input, output, window == 0 ? null : TimeWindows.of(window));
+ }
+
+ /**
+ * Constructor. It takes in input the topic name to be consumed and the topic name where the
+ * related stream will write the computed values.
+ * @param input source {@link KafkaTopic}
+ * @param output output {@link KafkaTopic}
+ * @param window time window for aggregation.
+ */
+ public StreamDefinition(@Nonnull KafkaTopic input, @Nonnull KafkaTopic output,
+ @Nullable TimeWindows window) {
+ Objects.requireNonNull(input);
+ Objects.requireNonNull(output);
+
this.inputTopic = input;
this.outputTopic = output;
+ this.window = window;
}
+ @Nonnull
public KafkaTopic getInputTopic() {
return inputTopic;
}
+ @Nonnull
public KafkaTopic getOutputTopic() {
return outputTopic;
}
@@ -56,7 +85,43 @@ public KafkaTopic getOutputTopic() {
*
* @return {@code String} representing the changelog topic name
*/
+ @Nonnull
public String getStateStoreName() {
- return FROM_LABEL + getInputTopic().getName() + TO_LABEL + getOutputTopic().getName();
+ return "From-" + getInputTopic().getName() + "-To-" + getOutputTopic().getName();
+ }
+
+ @Nullable
+ public TimeWindows getTimeWindows() {
+ return window;
+ }
+
+
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ StreamDefinition that = (StreamDefinition) o;
+ return Objects.equals(inputTopic, that.inputTopic)
+ && Objects.equals(outputTopic, that.outputTopic)
+ && Objects.equals(window, that.window);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(inputTopic, outputTopic, window);
+ }
+
+ @Override
+ public int compareTo(@Nonnull StreamDefinition o) {
+ return compare((StreamDefinition d) -> d.getInputTopic().getName())
+ .then(d -> d.getOutputTopic().getName())
+ .then(d -> d.getTimeWindows() == null ? 0 : d.getTimeWindows().sizeMs)
+ .then(d -> d.getTimeWindows() == null ? 0 : d.getTimeWindows().advanceMs)
+ .apply(this, o);
}
}
diff --git a/src/main/java/org/radarcns/stream/StreamGroup.java b/src/main/java/org/radarcns/stream/StreamGroup.java
index 70385f7c..7bf65575 100644
--- a/src/main/java/org/radarcns/stream/StreamGroup.java
+++ b/src/main/java/org/radarcns/stream/StreamGroup.java
@@ -16,6 +16,7 @@
package org.radarcns.stream;
+import java.util.Collection;
import java.util.List;
/**
@@ -35,5 +36,5 @@ public interface StreamGroup {
* @return stream definition of given input topic
* @throws IllegalArgumentException if the input topic is not found.
*/
- StreamDefinition getStreamDefinition(String inputTopic);
+ Collection getStreamDefinition(String inputTopic);
}
diff --git a/src/main/java/org/radarcns/stream/StreamMaster.java b/src/main/java/org/radarcns/stream/StreamMaster.java
index 41227519..9ab02233 100644
--- a/src/main/java/org/radarcns/stream/StreamMaster.java
+++ b/src/main/java/org/radarcns/stream/StreamMaster.java
@@ -28,6 +28,7 @@
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
@@ -110,7 +111,7 @@ public void start() {
* Signal all workers to shut down. This does not wait for the workers to shut down.
*/
@Override
- public void shutdown() {
+ public void shutdown() throws InterruptedException {
log.info("Shutting down all streams for {}", nameSensor);
while (!streamWorkers.isEmpty()) {
@@ -119,6 +120,7 @@ public void shutdown() {
}
executor.shutdown();
+ executor.awaitTermination(30, TimeUnit.SECONDS);
}
/**
@@ -158,7 +160,11 @@ public void notifyCrashedStream(@Nonnull String stream) {
log.info("Forcing shutdown of {}", nameSensor);
- //TODO implement forcing shutdown
+ try {
+ shutdown();
+ } catch (InterruptedException ex) {
+ log.warn("Shutdown interrupted");
+ }
}
public void restartStream(final StreamWorker, ?> worker) {
@@ -183,8 +189,8 @@ protected void announceTopics() {
protected abstract StreamGroup getStreamGroup();
/** Add a monitor to the master. It will run every 30 seconds. */
- void addMonitor(Monitor monitor) {
- executor.scheduleAtFixedRate(monitor, 0, 30, TimeUnit.SECONDS);
+ ScheduledFuture> addMonitor(Monitor monitor) {
+ return executor.scheduleAtFixedRate(monitor, 0, 30, TimeUnit.SECONDS);
}
protected synchronized int lowPriority() {
diff --git a/src/main/java/org/radarcns/stream/StreamWorker.java b/src/main/java/org/radarcns/stream/StreamWorker.java
index da387bdf..22c4b389 100644
--- a/src/main/java/org/radarcns/stream/StreamWorker.java
+++ b/src/main/java/org/radarcns/stream/StreamWorker.java
@@ -16,19 +16,41 @@
package org.radarcns.stream;
-import java.io.IOException;
-import javax.annotation.Nonnull;
import org.apache.avro.specific.SpecificRecord;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;
+import org.apache.kafka.streams.kstream.TimeWindows;
import org.radarcns.config.KafkaProperty;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.stream.aggregator.DoubleAggregation;
+import org.radarcns.stream.aggregator.DoubleArrayAggregation;
+import org.radarcns.stream.collector.DoubleArrayCollector;
+import org.radarcns.stream.collector.DoubleValueCollector;
import org.radarcns.util.Monitor;
+import org.radarcns.util.RadarSingletonFactory;
+import org.radarcns.util.RadarUtilities;
+import org.radarcns.util.serde.RadarSerdes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.annotation.Nonnull;
+import java.util.Collection;
+import java.util.List;
+import java.util.Properties;
+import java.util.concurrent.ScheduledFuture;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import static org.apache.kafka.streams.KeyValue.pair;
+import static org.radarcns.util.Serialization.floatToDouble;
+import static org.radarcns.util.StreamUtil.first;
+import static org.radarcns.util.StreamUtil.second;
+
/**
* Abstraction of a Kafka Stream.
*/
@@ -36,63 +58,84 @@ public abstract class StreamWorker streamDefinitions;
+ private final String buildVersion;
+ private Collection> monitors;
private final KafkaProperty kafkaProperty;
- private KafkaStreams streams;
+ protected final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
+
+ private List streams;
- public StreamWorker(@Nonnull StreamDefinition streamDefinition, @Nonnull String clientId,
- int numThreads, @Nonnull StreamMaster aggregator, KafkaProperty kafkaProperty,
+ public StreamWorker(@Nonnull Collection streamDefinitions,
+ int numThreads, @Nonnull StreamMaster aggregator, RadarPropertyHandler properties,
Logger monitorLog) {
if (numThreads < 1) {
throw new IllegalStateException(
"The number of concurrent threads must be at least 1");
}
- this.clientId = clientId;
this.master = aggregator;
- this.streamDefinition = streamDefinition;
+ this.streamDefinitions = streamDefinitions;
this.numThreads = numThreads;
- this.kafkaProperty = kafkaProperty;
+ this.buildVersion = properties.getRadarProperties().getBuildVersion();
+ this.kafkaProperty = properties.getKafkaProperties();
this.streams = null;
-
- if (monitorLog == null) {
- this.monitor = null;
- } else {
- this.monitor = new Monitor(monitorLog, "records have been read from "
- + streamDefinition.getInputTopic());
- }
+ this.monitors = null;
+ this.monitorLog = monitorLog;
}
/**
* Create a Kafka Stream builder. This implementation will create a stream from given
* input topic to given output topic. It monitors the amount of messages that are read.
*/
- protected KStreamBuilder createBuilder() throws IOException {
- KStreamBuilder builder = new KStreamBuilder();
+ protected KeyValue, KafkaStreams> createBuilder(StreamDefinition def) {
+ Monitor monitor;
+ ScheduledFuture> future = null;
+ if (monitorLog != null) {
+ monitor = new Monitor(monitorLog, "records have been read from "
+ + def.getInputTopic() + " to " + def.getOutputTopic());
+ future = master.addMonitor(monitor);
+ } else {
+ monitor = null;
+ }
- StreamDefinition definition = getStreamDefinition();
- String inputTopic = definition.getInputTopic().getName();
- String outputTopic = definition.getOutputTopic().getName();
+ KStreamBuilder builder = new KStreamBuilder();
- KStream inputStream = builder.stream(inputTopic)
- .map((k, v) -> {
- incrementMonitor();
- return new KeyValue<>(k, v);
- });
+ implementStream(def,
+ builder.stream(def.getInputTopic().getName())
+ .map((k, v) -> {
+ if (monitor != null) {
+ monitor.increment();
+ }
+ return pair(k, v);
+ })
+ ).to(def.getOutputTopic().getName());
+
+ return pair(future, new KafkaStreams(builder, getStreamProperties(def)));
+ }
- defineStream(inputStream).to(outputTopic);
+ /**
+ * @return Properties for a Kafka Stream
+ */
+ protected Properties getStreamProperties(@Nonnull StreamDefinition definition) {
+ String localClientId = getClass().getName() + "-" + buildVersion;
+ TimeWindows window = definition.getTimeWindows();
+ if (window != null) {
+ localClientId += '-' + window.sizeMs + '-' + window.advanceMs;
+ }
- return builder;
+ return kafkaProperty.getStreamProperties(localClientId, numThreads,
+ DeviceTimestampExtractor.class);
}
/**
* Defines the stream computation.
*/
- protected abstract KStream, ?> defineStream(@Nonnull KStream kstream);
+ protected abstract KStream, ?> implementStream(StreamDefinition definition,
+ @Nonnull KStream kstream);
/**
* Starts the stream and notify the StreamMaster.
@@ -102,30 +145,32 @@ public void start() {
throw new IllegalStateException("Streams already started. Cannot start them again.");
}
- log.info("Creating the stream {} from topic {} to topic {}",
- clientId, streamDefinition.getInputTopic(),
- streamDefinition.getOutputTopic());
-
- try {
- if (monitor != null) {
- master.addMonitor(monitor);
- }
- streams = new KafkaStreams(createBuilder(),
- kafkaProperty.getStream(clientId, numThreads, DeviceTimestampExtractor.class));
- streams.setUncaughtExceptionHandler(this);
- streams.start();
-
- master.notifyStartedStream(this);
- } catch (IOException ex) {
- uncaughtException(Thread.currentThread(), ex);
- }
+ List, KafkaStreams>> streamBuilders = getStreamDefinitions()
+ .parallelStream()
+ .map(this::createBuilder)
+ .collect(Collectors.toList());
+
+ monitors = streamBuilders.stream()
+ .map(first())
+ .collect(Collectors.toList());
+
+ streams = streamBuilders.stream()
+ .map(second())
+ .collect(Collectors.toList());
+
+ streams.forEach(stream -> {
+ stream.setUncaughtExceptionHandler(this);
+ stream.start();
+ });
+
+ master.notifyStartedStream(this);
}
/**
* Close the stream and notify the StreamMaster.
*/
public void shutdown() {
- log.info("Shutting down {} stream", clientId);
+ log.info("Shutting down {} stream", getClass().getSimpleName());
closeStreams();
@@ -145,28 +190,62 @@ public void uncaughtException(Thread t, Throwable e) {
if (e instanceof StreamsException) {
master.restartStream(this);
} else {
- master.notifyCrashedStream(clientId);
+ master.notifyCrashedStream(getClass().getSimpleName());
}
}
private void closeStreams() {
if (streams != null) {
- streams.close();
+ streams.forEach(KafkaStreams::close);
streams = null;
}
+
+ if (monitors != null) {
+ monitors.forEach(f -> f.cancel(false));
+ monitors = null;
+ }
+ }
+
+ protected Collection getStreamDefinitions() {
+ return streamDefinitions;
+ }
+
+ protected final KStream aggregateFloat(
+ StreamDefinition definition,
+ @Nonnull KStream kstream, Function field) {
+ return aggregateDouble(definition, kstream, v -> floatToDouble(field.apply(v)));
}
- protected StreamDefinition getStreamDefinition() {
- return streamDefinition;
+ protected final KStream aggregateDouble(
+ StreamDefinition definition,
+ @Nonnull KStream kstream, Function field) {
+ return kstream.groupByKey()
+ .aggregate(
+ DoubleValueCollector::new,
+ (k, v, valueCollector) -> valueCollector.add(field.apply(v)),
+ definition.getTimeWindows(),
+ RadarSerdes.getInstance().getDoubleCollector(),
+ definition.getStateStoreName())
+ .toStream()
+ .map(utilities::collectorToAvro);
}
- /** Increment the number of messages processed. */
- protected void incrementMonitor() {
- monitor.increment();
+ protected final KStream aggregateDoubleArray(
+ StreamDefinition definition,
+ @Nonnull KStream kstream, Function field) {
+ return kstream.groupByKey()
+ .aggregate(
+ DoubleArrayCollector::new,
+ (k, v, valueCollector) -> valueCollector.add(field.apply(v)),
+ definition.getTimeWindows(),
+ RadarSerdes.getInstance().getDoubleArrayCollector(),
+ definition.getStateStoreName())
+ .toStream()
+ .map(utilities::collectorToAvro);
}
@Override
public String toString() {
- return getClass().getSimpleName() + '<' + clientId + '>';
+ return getClass().getSimpleName();
}
}
diff --git a/src/main/java/org/radarcns/stream/empatica/E4AccelerationStream.java b/src/main/java/org/radarcns/stream/empatica/E4AccelerationStream.java
index 343823d0..f0397663 100644
--- a/src/main/java/org/radarcns/stream/empatica/E4AccelerationStream.java
+++ b/src/main/java/org/radarcns/stream/empatica/E4AccelerationStream.java
@@ -16,47 +16,41 @@
package org.radarcns.stream.empatica;
-import javax.annotation.Nonnull;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
-import org.radarcns.aggregator.DoubleArrayAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.empatica.EmpaticaE4Acceleration;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.empatica.EmpaticaE4Acceleration;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.stream.collector.DoubleArrayCollector;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
-import org.radarcns.util.serde.RadarSerdes;
+import org.radarcns.stream.aggregator.DoubleArrayAggregation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.annotation.Nonnull;
+import java.util.Collection;
+
+import static org.radarcns.util.Serialization.floatToDouble;
+
/**
* Definition of Kafka Stream for aggregating data collected by Empatica E4 Accelerometer sensor.
*/
-public class E4AccelerationStream extends StreamWorker {
- private static final Logger log = LoggerFactory.getLogger(E4AccelerationStream.class);
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
+public class E4AccelerationStream extends StreamWorker {
+ private static final Logger logger = LoggerFactory.getLogger(E4AccelerationStream.class);
- public E4AccelerationStream(String clientId, int numThread, StreamMaster master,
- KafkaProperty kafkaProperties) {
- super(E4Streams.getInstance().getAccelerationStream(), clientId,
- numThread, master, kafkaProperties, log);
+ public E4AccelerationStream(Collection definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
@Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupByKey()
- .aggregate(
- DoubleArrayCollector::new,
- (k, v, valueCollector) -> valueCollector.add(utilities.accelerationToArray(v)),
- TimeWindows.of(10 * 1000L),
- RadarSerdes.getInstance().getDoubleArrayCollector(),
- getStreamDefinition().getStateStoreName())
- .toStream()
- .map(utilities::collectorToAvro);
+ protected KStream implementStream(
+ StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return aggregateDoubleArray(definition, kstream, v -> new double[] {
+ floatToDouble(v.getX()),
+ floatToDouble(v.getY()),
+ floatToDouble(v.getZ())});
}
}
diff --git a/src/main/java/org/radarcns/stream/empatica/E4BatteryLevelStream.java b/src/main/java/org/radarcns/stream/empatica/E4BatteryLevelStream.java
index 52c8c394..727489b4 100644
--- a/src/main/java/org/radarcns/stream/empatica/E4BatteryLevelStream.java
+++ b/src/main/java/org/radarcns/stream/empatica/E4BatteryLevelStream.java
@@ -16,47 +16,35 @@
package org.radarcns.stream.empatica;
-import javax.annotation.Nonnull;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
-import org.radarcns.aggregator.DoubleAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.empatica.EmpaticaE4BatteryLevel;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.empatica.EmpaticaE4BatteryLevel;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.stream.collector.DoubleValueCollector;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
-import org.radarcns.util.serde.RadarSerdes;
+import org.radarcns.stream.aggregator.DoubleAggregation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.annotation.Nonnull;
+import java.util.Collection;
+
/**
* Kafka Stream for aggregating data about Empatica E4 battery level.
*/
-public class E4BatteryLevelStream extends StreamWorker {
- private static final Logger log = LoggerFactory.getLogger(E4BatteryLevelStream.class);
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
+public class E4BatteryLevelStream extends StreamWorker {
+ private static final Logger logger = LoggerFactory.getLogger(E4BatteryLevelStream.class);
- public E4BatteryLevelStream(String clientId, int numThread, StreamMaster master,
- KafkaProperty kafkaProperties) {
- super(E4Streams.getInstance().getBatteryLevelStream(), clientId,
- numThread, master, kafkaProperties, log);
+ public E4BatteryLevelStream(Collection definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
@Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupByKey()
- .aggregate(
- DoubleValueCollector::new,
- (k, v, valueCollector) -> valueCollector.add(v.getBatteryLevel()),
- TimeWindows.of(10 * 1000L),
- RadarSerdes.getInstance().getDoubleCollector(),
- getStreamDefinition().getStateStoreName())
- .toStream()
- .map(utilities::collectorToAvro);
+ protected KStream implementStream(StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return aggregateFloat(definition, kstream, EmpaticaE4BatteryLevel::getBatteryLevel);
}
}
diff --git a/src/main/java/org/radarcns/stream/empatica/E4BloodVolumePulseStream.java b/src/main/java/org/radarcns/stream/empatica/E4BloodVolumePulseStream.java
index 762dbefb..81af643d 100644
--- a/src/main/java/org/radarcns/stream/empatica/E4BloodVolumePulseStream.java
+++ b/src/main/java/org/radarcns/stream/empatica/E4BloodVolumePulseStream.java
@@ -16,49 +16,36 @@
package org.radarcns.stream.empatica;
-import javax.annotation.Nonnull;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
-import org.radarcns.aggregator.DoubleAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.empatica.EmpaticaE4BloodVolumePulse;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.empatica.EmpaticaE4BloodVolumePulse;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.stream.collector.DoubleValueCollector;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
-import org.radarcns.util.serde.RadarSerdes;
+import org.radarcns.stream.aggregator.DoubleAggregation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.annotation.Nonnull;
+import java.util.Collection;
+
/**
* Kafka Stream for aggregating data about Blood Volume Pulse collected by Empatica E4.
*/
public class E4BloodVolumePulseStream extends
- StreamWorker {
- private static final Logger log = LoggerFactory.getLogger(E4BloodVolumePulseStream.class);
-
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
+ StreamWorker {
+ private static final Logger logger = LoggerFactory.getLogger(E4BloodVolumePulseStream.class);
- public E4BloodVolumePulseStream(String clientId, int numThread, StreamMaster master,
- KafkaProperty kafkaProperties) {
- super(E4Streams.getInstance().getBloodVolumePulseStream(),
- clientId, numThread, master, kafkaProperties, log);
+ public E4BloodVolumePulseStream(Collection definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
@Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupByKey()
- .aggregate(
- DoubleValueCollector::new,
- (k, v, valueCollector) -> valueCollector.add(v.getBloodVolumePulse()),
- TimeWindows.of(10 * 1000L),
- RadarSerdes.getInstance().getDoubleCollector(),
- getStreamDefinition().getStateStoreName())
- .toStream()
- .map(utilities::collectorToAvro);
+ protected KStream implementStream(StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return aggregateFloat(definition, kstream, EmpaticaE4BloodVolumePulse::getBloodVolumePulse);
}
}
diff --git a/src/main/java/org/radarcns/stream/empatica/E4ElectroDermalActivityStream.java b/src/main/java/org/radarcns/stream/empatica/E4ElectroDermalActivityStream.java
index 1a0d34b9..5aeca80a 100644
--- a/src/main/java/org/radarcns/stream/empatica/E4ElectroDermalActivityStream.java
+++ b/src/main/java/org/radarcns/stream/empatica/E4ElectroDermalActivityStream.java
@@ -16,49 +16,38 @@
package org.radarcns.stream.empatica;
-import javax.annotation.Nonnull;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
-import org.radarcns.aggregator.DoubleAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.empatica.EmpaticaE4ElectroDermalActivity;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.empatica.EmpaticaE4ElectroDermalActivity;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.stream.collector.DoubleValueCollector;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
-import org.radarcns.util.serde.RadarSerdes;
+import org.radarcns.stream.aggregator.DoubleAggregation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.annotation.Nonnull;
+import java.util.Collection;
+
/**
* Kafka Stream for aggregating data about electrodermal activity collected by Empatica E4.
*/
public class E4ElectroDermalActivityStream extends
- StreamWorker {
- private static final Logger log = LoggerFactory.getLogger(E4ElectroDermalActivityStream.class);
-
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
+ StreamWorker {
+ private static final Logger logger = LoggerFactory.getLogger(
+ E4ElectroDermalActivityStream.class);
- public E4ElectroDermalActivityStream(String clientId, int numThread, StreamMaster master,
- KafkaProperty kafkaProperties) {
- super(E4Streams.getInstance().getElectroDermalActivityStream(),
- clientId, numThread, master, kafkaProperties, log);
+ public E4ElectroDermalActivityStream(Collection<StreamDefinition> definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
@Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupByKey()
- .aggregate(
- DoubleValueCollector::new,
- (k, v, valueCollector) -> valueCollector.add(v.getElectroDermalActivity()),
- TimeWindows.of(10 * 1000L),
- RadarSerdes.getInstance().getDoubleCollector(),
- getStreamDefinition().getStateStoreName())
- .toStream()
- .map(utilities::collectorToAvro);
+ protected KStream implementStream(StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return aggregateFloat(definition, kstream,
+ EmpaticaE4ElectroDermalActivity::getElectroDermalActivity);
}
}
diff --git a/src/main/java/org/radarcns/stream/empatica/E4HeartRateStream.java b/src/main/java/org/radarcns/stream/empatica/E4HeartRateStream.java
index e699118b..50f01790 100644
--- a/src/main/java/org/radarcns/stream/empatica/E4HeartRateStream.java
+++ b/src/main/java/org/radarcns/stream/empatica/E4HeartRateStream.java
@@ -16,50 +16,36 @@
package org.radarcns.stream.empatica;
-import javax.annotation.Nonnull;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
-import org.radarcns.aggregator.DoubleAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.empatica.EmpaticaE4InterBeatInterval;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.empatica.EmpaticaE4InterBeatInterval;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.stream.collector.DoubleValueCollector;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
-import org.radarcns.util.serde.RadarSerdes;
+import org.radarcns.stream.aggregator.DoubleAggregation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.annotation.Nonnull;
+import java.util.Collection;
+
/**
* Kafka Stream for computing and aggregating Heart Rate values collected by Empatica E4.
*/
-public class E4HeartRateStream extends StreamWorker {
- private static final Logger log = LoggerFactory.getLogger(E4HeartRateStream.class);
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
+public class E4HeartRateStream extends StreamWorker {
+ private static final Logger logger = LoggerFactory.getLogger(E4HeartRateStream.class);
- public E4HeartRateStream(String clientId, int numThread, StreamMaster master,
- KafkaProperty kafkaProperties) {
- super(E4Streams.getInstance().getHeartRateStream(), clientId,
- numThread, master, kafkaProperties, log);
+ public E4HeartRateStream(Collection<StreamDefinition> definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
- @Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupByKey()
- .aggregate(
- DoubleValueCollector::new,
- (k, v, valueCollector) -> {
- double value = utilities.ibiToHeartRate(v.getInterBeatInterval());
- return valueCollector.add(value);
- },
- TimeWindows.of(10 * 1000L),
- RadarSerdes.getInstance().getDoubleCollector(),
- getStreamDefinition().getStateStoreName())
- .toStream()
- .map(utilities::collectorToAvro);
+ protected KStream implementStream(
+ StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return aggregateDouble(definition, kstream,
+ v -> utilities.ibiToHeartRate(v.getInterBeatInterval()));
}
}
diff --git a/src/main/java/org/radarcns/stream/empatica/E4InterBeatIntervalStream.java b/src/main/java/org/radarcns/stream/empatica/E4InterBeatIntervalStream.java
index 0fc1e0d2..66ff8701 100644
--- a/src/main/java/org/radarcns/stream/empatica/E4InterBeatIntervalStream.java
+++ b/src/main/java/org/radarcns/stream/empatica/E4InterBeatIntervalStream.java
@@ -16,48 +16,37 @@
package org.radarcns.stream.empatica;
-import javax.annotation.Nonnull;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
-import org.radarcns.aggregator.DoubleAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.empatica.EmpaticaE4InterBeatInterval;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.empatica.EmpaticaE4InterBeatInterval;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.stream.collector.DoubleValueCollector;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
-import org.radarcns.util.serde.RadarSerdes;
+import org.radarcns.stream.aggregator.DoubleAggregation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.annotation.Nonnull;
+import java.util.Collection;
+
/**
* Definition of Kafka Stream for aggregating Inter Beat Interval values collected by Empatica E4.
*/
public class E4InterBeatIntervalStream extends
- StreamWorker {
- private static final Logger log = LoggerFactory.getLogger(E4InterBeatIntervalStream.class);
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
+ StreamWorker {
+ private static final Logger logger = LoggerFactory.getLogger(E4InterBeatIntervalStream.class);
- public E4InterBeatIntervalStream(String clientId, int numThread, StreamMaster master,
- KafkaProperty kafkaProperties) {
- super(E4Streams.getInstance().getInterBeatIntervalStream(),
- clientId, numThread, master, kafkaProperties, log);
+ public E4InterBeatIntervalStream(Collection<StreamDefinition> definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
@Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupByKey()
- .aggregate(
- DoubleValueCollector::new,
- (k, v, valueCollector) -> valueCollector.add(v.getInterBeatInterval()),
- TimeWindows.of(10 * 1000L),
- RadarSerdes.getInstance().getDoubleCollector(),
- getStreamDefinition().getStateStoreName())
- .toStream()
- .map(utilities::collectorToAvro);
+ protected KStream implementStream(StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return aggregateFloat(definition, kstream,
+ EmpaticaE4InterBeatInterval::getInterBeatInterval);
}
}
diff --git a/src/main/java/org/radarcns/stream/empatica/E4StreamMaster.java b/src/main/java/org/radarcns/stream/empatica/E4StreamMaster.java
index a2013dea..335ba294 100644
--- a/src/main/java/org/radarcns/stream/empatica/E4StreamMaster.java
+++ b/src/main/java/org/radarcns/stream/empatica/E4StreamMaster.java
@@ -16,14 +16,14 @@
package org.radarcns.stream.empatica;
-import java.util.List;
-import org.radarcns.config.KafkaProperty;
import org.radarcns.config.RadarPropertyHandler;
import org.radarcns.stream.StreamGroup;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
import org.radarcns.util.RadarSingletonFactory;
+import java.util.List;
+
/**
* Singleton StreamMaster for Empatica E4.
* @see StreamMaster
@@ -37,20 +37,20 @@ protected StreamGroup getStreamGroup() {
@Override
protected void createWorkers(List> list, StreamMaster master) {
RadarPropertyHandler propertyHandler = RadarSingletonFactory.getRadarPropertyHandler();
- KafkaProperty kafkaProperty = propertyHandler.getKafkaProperties();
+ E4Streams defs = E4Streams.getInstance();
list.add(new E4AccelerationStream(
- "E4AccelerationStream", highPriority(), master, kafkaProperty));
+ defs.getAccelerationStream(), highPriority(), master, propertyHandler));
list.add(new E4BatteryLevelStream(
- "E4BatteryLevelStream", lowPriority(), master, kafkaProperty));
+ defs.getBatteryLevelStream(), lowPriority(), master, propertyHandler));
list.add(new E4BloodVolumePulseStream(
- "E4BloodVolumePulseStream", highPriority(), master, kafkaProperty));
+ defs.getBloodVolumePulseStream(), highPriority(), master, propertyHandler));
list.add(new E4ElectroDermalActivityStream(
- "E4ElectroDermalActivityStream", normalPriority(), master, kafkaProperty));
+ defs.getElectroDermalActivityStream(), normalPriority(), master, propertyHandler));
list.add(new E4HeartRateStream(
- "E4HeartRateStream", highPriority(), master, kafkaProperty));
+ defs.getHeartRateStream(), lowPriority(), master, propertyHandler));
list.add(new E4InterBeatIntervalStream(
- "E4InterBeatIntervalStream", highPriority(), master, kafkaProperty));
+ defs.getInterBeatIntervalStream(), lowPriority(), master, propertyHandler));
list.add(new E4TemperatureStream(
- "E4TemperatureStream", highPriority(), master, kafkaProperty));
+ defs.getTemperatureStream(), lowPriority(), master, propertyHandler));
}
}
diff --git a/src/main/java/org/radarcns/stream/empatica/E4Streams.java b/src/main/java/org/radarcns/stream/empatica/E4Streams.java
index 42ef06c7..6e404259 100644
--- a/src/main/java/org/radarcns/stream/empatica/E4Streams.java
+++ b/src/main/java/org/radarcns/stream/empatica/E4Streams.java
@@ -20,6 +20,8 @@
import org.radarcns.stream.GeneralStreamGroup;
import org.radarcns.stream.StreamDefinition;
+import java.util.Collection;
+
/**
* Singleton class representing the list of Empatica E4 topics
*/
@@ -27,71 +29,64 @@ public final class E4Streams extends GeneralStreamGroup {
private static final E4Streams INSTANCE = new E4Streams();
//All sensor topics
- private final StreamDefinition accelerationStream;
- private final StreamDefinition batteryLevelStream;
- private final StreamDefinition bloodVolumePulseStream;
- private final StreamDefinition electroDermalActivityStream;
- private final StreamDefinition interBeatIntervalStream;
- private final StreamDefinition sensorStatusStream;
- private final StreamDefinition temperatureStream;
+ private final Collection<StreamDefinition> accelerationStream;
+ private final Collection<StreamDefinition> batteryLevelStream;
+ private final Collection<StreamDefinition> bloodVolumePulseStream;
+ private final Collection<StreamDefinition> electroDermalActivityStream;
+ private final Collection<StreamDefinition> interBeatIntervalStream;
+ private final Collection<StreamDefinition> temperatureStream;
// Internal topics
- private final StreamDefinition heartRateStream;
+ private final Collection<StreamDefinition> heartRateStream;
public static E4Streams getInstance() {
return INSTANCE;
}
private E4Streams() {
- accelerationStream = createSensorStream(
+ accelerationStream = createWindowedSensorStream(
"android_empatica_e4_acceleration");
- batteryLevelStream = createSensorStream(
+ batteryLevelStream = createWindowedSensorStream(
"android_empatica_e4_battery_level");
- bloodVolumePulseStream = createSensorStream(
+ bloodVolumePulseStream = createWindowedSensorStream(
"android_empatica_e4_blood_volume_pulse");
- electroDermalActivityStream = createSensorStream(
+ electroDermalActivityStream = createWindowedSensorStream(
"android_empatica_e4_electrodermal_activity");
- interBeatIntervalStream = createSensorStream(
+ interBeatIntervalStream = createWindowedSensorStream(
"android_empatica_e4_inter_beat_interval");
- sensorStatusStream = createSensorStream(
- "android_empatica_e4_sensor_status");
- temperatureStream = createSensorStream(
+ temperatureStream = createWindowedSensorStream(
"android_empatica_e4_temperature");
- heartRateStream = createStream(
+ heartRateStream = createWindowedSensorStream(
"android_empatica_e4_inter_beat_interval",
- "android_empatica_e4_heartrate");
+ "android_empatica_e4_heart_rate");
}
- public StreamDefinition getAccelerationStream() {
+ public Collection<StreamDefinition> getAccelerationStream() {
return accelerationStream;
}
- public StreamDefinition getBatteryLevelStream() {
+ public Collection<StreamDefinition> getBatteryLevelStream() {
return batteryLevelStream;
}
- public StreamDefinition getBloodVolumePulseStream() {
+ public Collection<StreamDefinition> getBloodVolumePulseStream() {
return bloodVolumePulseStream;
}
- public StreamDefinition getElectroDermalActivityStream() {
+ public Collection<StreamDefinition> getElectroDermalActivityStream() {
return electroDermalActivityStream;
}
- public StreamDefinition getInterBeatIntervalStream() {
+ public Collection<StreamDefinition> getInterBeatIntervalStream() {
return interBeatIntervalStream;
}
- public StreamDefinition getSensorStatusStream() {
- return sensorStatusStream;
- }
-
- public StreamDefinition getTemperatureStream() {
+ public Collection<StreamDefinition> getTemperatureStream() {
return temperatureStream;
}
- public StreamDefinition getHeartRateStream() {
+ public Collection<StreamDefinition> getHeartRateStream() {
return heartRateStream;
}
}
diff --git a/src/main/java/org/radarcns/stream/empatica/E4TemperatureStream.java b/src/main/java/org/radarcns/stream/empatica/E4TemperatureStream.java
index b2cf2f8c..5662adb0 100644
--- a/src/main/java/org/radarcns/stream/empatica/E4TemperatureStream.java
+++ b/src/main/java/org/radarcns/stream/empatica/E4TemperatureStream.java
@@ -16,47 +16,35 @@
package org.radarcns.stream.empatica;
-import javax.annotation.Nonnull;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
-import org.radarcns.aggregator.DoubleAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.empatica.EmpaticaE4Temperature;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.empatica.EmpaticaE4Temperature;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.stream.collector.DoubleValueCollector;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
-import org.radarcns.util.serde.RadarSerdes;
+import org.radarcns.stream.aggregator.DoubleAggregation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import javax.annotation.Nonnull;
+import java.util.Collection;
+
/**
* Definition of Kafka Stream for aggregating temperature values collected by Empatica E4.
*/
-public class E4TemperatureStream extends StreamWorker {
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
- private static final Logger log = LoggerFactory.getLogger(E4TemperatureStream.class);
+public class E4TemperatureStream extends StreamWorker {
+ private static final Logger logger = LoggerFactory.getLogger(E4TemperatureStream.class);
- public E4TemperatureStream(String clientId, int numThread, StreamMaster master,
- KafkaProperty kafkaProperties) {
- super(E4Streams.getInstance().getTemperatureStream(), clientId,
- numThread, master, kafkaProperties, log);
+ public E4TemperatureStream(Collection<StreamDefinition> definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
@Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupByKey()
- .aggregate(
- DoubleValueCollector::new,
- (k, v, valueCollector) -> valueCollector.add(v.getTemperature()),
- TimeWindows.of(10 * 1000L),
- RadarSerdes.getInstance().getDoubleCollector(),
- getStreamDefinition().getStateStoreName())
- .toStream()
- .map(utilities::collectorToAvro);
+ protected KStream implementStream(StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return aggregateFloat(definition, kstream, EmpaticaE4Temperature::getTemperature);
}
}
diff --git a/src/main/java/org/radarcns/stream/phone/PhoneAccelerationStream.java b/src/main/java/org/radarcns/stream/phone/PhoneAccelerationStream.java
index d83af76f..0dccb43b 100644
--- a/src/main/java/org/radarcns/stream/phone/PhoneAccelerationStream.java
+++ b/src/main/java/org/radarcns/stream/phone/PhoneAccelerationStream.java
@@ -1,51 +1,38 @@
package org.radarcns.stream.phone;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
-import org.radarcns.aggregator.DoubleArrayAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
-import org.radarcns.phone.PhoneAcceleration;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.phone.PhoneAcceleration;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.stream.collector.DoubleArrayCollector;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
-import org.radarcns.util.serde.RadarSerdes;
+import org.radarcns.stream.aggregator.DoubleArrayAggregation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
+import java.util.Collection;
import static org.radarcns.util.Serialization.floatToDouble;
-public class PhoneAccelerationStream extends StreamWorker {
+public class PhoneAccelerationStream extends StreamWorker {
private static final Logger logger = LoggerFactory.getLogger(PhoneAccelerationStream.class);
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
- public PhoneAccelerationStream(String phoneAccelerationStream, int priority,
- StreamMaster phoneStreamMaster, KafkaProperty kafkaProperty) {
- super(PhoneStreams.getInstance().getAccelerationStream(), phoneAccelerationStream,
- priority, phoneStreamMaster,
- kafkaProperty, logger);
+ public PhoneAccelerationStream(Collection<StreamDefinition> definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
@Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupByKey()
- .aggregate(
- DoubleArrayCollector::new,
- (k, v, valueCollector) -> valueCollector.add(new double[] {
- floatToDouble(v.getX()),
- floatToDouble(v.getY()),
- floatToDouble(v.getZ())
- }),
- TimeWindows.of(10 * 1000L),
- RadarSerdes.getInstance().getDoubleArrayCollector(),
- getStreamDefinition().getStateStoreName())
- .toStream()
- .map(utilities::collectorToAvro);
+ protected KStream implementStream(
+ StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return aggregateDoubleArray(definition, kstream, v -> new double[] {
+ floatToDouble(v.getX()),
+ floatToDouble(v.getY()),
+ floatToDouble(v.getZ())
+ });
}
}
diff --git a/src/main/java/org/radarcns/stream/phone/PhoneBatteryStream.java b/src/main/java/org/radarcns/stream/phone/PhoneBatteryStream.java
index 2395cb32..43198faf 100644
--- a/src/main/java/org/radarcns/stream/phone/PhoneBatteryStream.java
+++ b/src/main/java/org/radarcns/stream/phone/PhoneBatteryStream.java
@@ -1,45 +1,31 @@
package org.radarcns.stream.phone;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
-import org.radarcns.aggregator.DoubleAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
-import org.radarcns.phone.PhoneBatteryLevel;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.phone.PhoneBatteryLevel;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.stream.collector.DoubleValueCollector;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
-import org.radarcns.util.serde.RadarSerdes;
+import org.radarcns.stream.aggregator.DoubleAggregation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
+import java.util.Collection;
-public class PhoneBatteryStream extends StreamWorker {
+public class PhoneBatteryStream extends StreamWorker {
private static final Logger logger = LoggerFactory.getLogger(PhoneBatteryStream.class);
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
- public PhoneBatteryStream(String phoneBatteryStream, int priority, StreamMaster master,
- KafkaProperty kafkaProperty) {
- super(PhoneStreams.getInstance().getBatteryStream(), phoneBatteryStream,
- priority, master,
- kafkaProperty, logger);
+ public PhoneBatteryStream(Collection<StreamDefinition> definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
@Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupByKey()
- .aggregate(
- DoubleValueCollector::new,
- (k, v, valueCollector) -> valueCollector.add(v.getBatteryLevel()),
- TimeWindows.of(10 * 1000L),
- RadarSerdes.getInstance().getDoubleCollector(),
- getStreamDefinition().getStateStoreName())
- .toStream()
- .map(utilities::collectorToAvro);
+ protected KStream implementStream(StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return aggregateFloat(definition, kstream, PhoneBatteryLevel::getBatteryLevel);
}
}
diff --git a/src/main/java/org/radarcns/stream/phone/PhoneStreamMaster.java b/src/main/java/org/radarcns/stream/phone/PhoneStreamMaster.java
index e0d9d75e..48e260c5 100644
--- a/src/main/java/org/radarcns/stream/phone/PhoneStreamMaster.java
+++ b/src/main/java/org/radarcns/stream/phone/PhoneStreamMaster.java
@@ -16,14 +16,14 @@
package org.radarcns.stream.phone;
-import java.util.List;
-import org.radarcns.config.KafkaProperty;
import org.radarcns.config.RadarPropertyHandler;
import org.radarcns.stream.StreamGroup;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
import org.radarcns.util.RadarSingletonFactory;
+import java.util.List;
+
/**
* Singleton StreamMaster for Phone
* @see StreamMaster
@@ -37,13 +37,14 @@ protected StreamGroup getStreamGroup() {
@Override
protected void createWorkers(List> list, StreamMaster master) {
RadarPropertyHandler propertyHandler = RadarSingletonFactory.getRadarPropertyHandler();
- KafkaProperty kafkaProperty = propertyHandler.getKafkaProperties();
- list.add(new PhoneUsageStream("PhoneUsageStream", lowPriority(), master, kafkaProperty));
+ PhoneStreams defs = PhoneStreams.getInstance();
+ list.add(new PhoneUsageStream(
+ defs.getUsageStream(), lowPriority(), master, propertyHandler));
list.add(new PhoneUsageAggregationStream(
- "PhoneUsageAggregationStream", lowPriority(), master, kafkaProperty));
- list.add(new PhoneBatteryStream("PhoneBatteryStream", lowPriority(), master,
- kafkaProperty));
- list.add(new PhoneAccelerationStream("PhoneAccelerationStream", normalPriority(), master,
- kafkaProperty));
+ defs.getUsageEventAggregationStream(), lowPriority(), master, propertyHandler));
+ list.add(new PhoneBatteryStream(
+ defs.getBatteryStream(), lowPriority(), master, propertyHandler));
+ list.add(new PhoneAccelerationStream(
+ defs.getAccelerationStream(), normalPriority(), master, propertyHandler));
}
}
diff --git a/src/main/java/org/radarcns/stream/phone/PhoneStreams.java b/src/main/java/org/radarcns/stream/phone/PhoneStreams.java
index 26df8d68..cd0628d0 100644
--- a/src/main/java/org/radarcns/stream/phone/PhoneStreams.java
+++ b/src/main/java/org/radarcns/stream/phone/PhoneStreams.java
@@ -19,13 +19,16 @@
import org.radarcns.stream.GeneralStreamGroup;
import org.radarcns.stream.StreamDefinition;
+import java.util.Collection;
+import java.util.concurrent.TimeUnit;
+
public final class PhoneStreams extends GeneralStreamGroup {
private static final PhoneStreams INSTANCE = new PhoneStreams();
- private final StreamDefinition usageEventStream;
- private final StreamDefinition usageEventAggregationStream;
- private final StreamDefinition accelerationStream;
- private final StreamDefinition batteryStream;
+ private final Collection<StreamDefinition> usageEventStream;
+ private final Collection<StreamDefinition> usageEventAggregationStream;
+ private final Collection<StreamDefinition> accelerationStream;
+ private final Collection<StreamDefinition> batteryStream;
public static PhoneStreams getInstance() {
return INSTANCE;
@@ -36,24 +39,25 @@ private PhoneStreams() {
"android_phone_usage_event");
usageEventAggregationStream = createStream(
"android_phone_usage_event_output",
- "android_phone_usage_event_aggregated");
- accelerationStream = createSensorStream("android_phone_acceleration");
- batteryStream = createSensorStream("android_phone_battery_level");
+ "android_phone_usage_event_aggregated",
+ TimeUnit.DAYS.toMillis(1));
+ accelerationStream = createWindowedSensorStream("android_phone_acceleration");
+ batteryStream = createWindowedSensorStream("android_phone_battery_level");
}
- public StreamDefinition getUsageStream() {
+ public Collection<StreamDefinition> getUsageStream() {
return usageEventStream;
}
- public StreamDefinition getUsageEventAggregationStream() {
+ public Collection<StreamDefinition> getUsageEventAggregationStream() {
return usageEventAggregationStream;
}
- public StreamDefinition getAccelerationStream() {
+ public Collection<StreamDefinition> getAccelerationStream() {
return accelerationStream;
}
- public StreamDefinition getBatteryStream() {
+ public Collection<StreamDefinition> getBatteryStream() {
return batteryStream;
}
}
diff --git a/src/main/java/org/radarcns/stream/phone/PhoneUsageAggregationStream.java b/src/main/java/org/radarcns/stream/phone/PhoneUsageAggregationStream.java
index 0ddbcf9e..c12d7acd 100644
--- a/src/main/java/org/radarcns/stream/phone/PhoneUsageAggregationStream.java
+++ b/src/main/java/org/radarcns/stream/phone/PhoneUsageAggregationStream.java
@@ -1,50 +1,49 @@
package org.radarcns.stream.phone;
import org.apache.kafka.streams.kstream.KStream;
-import org.apache.kafka.streams.kstream.TimeWindows;
import org.radarcns.aggregator.PhoneUsageAggregator;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
-import org.radarcns.phone.PhoneUsageEvent;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.phone.PhoneUsageEvent;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
-import org.radarcns.util.RadarSingletonFactory;
-import org.radarcns.util.RadarUtilities;
import org.radarcns.util.serde.RadarSerdes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
+import java.util.Collection;
/**
* Created by piotrzakrzewski on 26/07/2017.
*/
-public class PhoneUsageAggregationStream extends StreamWorker {
+public class PhoneUsageAggregationStream extends StreamWorker {
+ private static final Logger logger = LoggerFactory.getLogger(PhoneUsageAggregationStream.class);
- private static final Logger log = LoggerFactory.getLogger(PhoneUsageAggregationStream.class);
- private static final long DAY_IN_MS = 24 * 60 * 60 * 1000;
- private final RadarUtilities utilities = RadarSingletonFactory.getRadarUtilities();
-
- public PhoneUsageAggregationStream(@Nonnull String clientId,
- int numThreads,
- @Nonnull StreamMaster master,
- KafkaProperty kafkaProperties) {
- super(PhoneStreams.getInstance().getUsageEventAggregationStream(), clientId,
- numThreads, master, kafkaProperties, log);
+ public PhoneUsageAggregationStream(Collection<StreamDefinition> definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
}
@Override
- protected KStream defineStream(
- @Nonnull KStream kstream) {
- return kstream.groupBy((k, v) -> new TemporaryPackageKey(k, v.getPackageName()))
+ protected KStream implementStream(
+ StreamDefinition definition,
+ @Nonnull KStream kstream) {
+ return kstream.groupBy(PhoneUsageAggregationStream::temporaryKey)
.aggregate(
PhoneUsageCollector::new,
(k, v, valueCollector) -> valueCollector.update(v),
- TimeWindows.of(DAY_IN_MS),
+ definition.getTimeWindows(),
RadarSerdes.getInstance().getPhoneUsageCollector(),
- getStreamDefinition().getStateStoreName())
+ definition.getStateStoreName())
.toStream()
.map(utilities::collectorToAvro);
}
+
+ private static TemporaryPackageKey temporaryKey(ObservationKey key, PhoneUsageEvent value) {
+ return new TemporaryPackageKey(key.getProjectId(), key.getUserId(), key.getSourceId(),
+ value.getPackageName());
+ }
}
diff --git a/src/main/java/org/radarcns/stream/phone/PhoneUsageCollector.java b/src/main/java/org/radarcns/stream/phone/PhoneUsageCollector.java
index 870de7f0..8c7e1679 100644
--- a/src/main/java/org/radarcns/stream/phone/PhoneUsageCollector.java
+++ b/src/main/java/org/radarcns/stream/phone/PhoneUsageCollector.java
@@ -1,7 +1,7 @@
package org.radarcns.stream.phone;
-import org.radarcns.phone.PhoneUsageEvent;
-import org.radarcns.phone.UsageEventType;
+import org.radarcns.passive.phone.PhoneUsageEvent;
+import org.radarcns.passive.phone.UsageEventType;
import java.math.BigDecimal;
import java.math.MathContext;
diff --git a/src/main/java/org/radarcns/stream/phone/PhoneUsageStream.java b/src/main/java/org/radarcns/stream/phone/PhoneUsageStream.java
index 4f779409..09592c8d 100644
--- a/src/main/java/org/radarcns/stream/phone/PhoneUsageStream.java
+++ b/src/main/java/org/radarcns/stream/phone/PhoneUsageStream.java
@@ -16,19 +16,21 @@
package org.radarcns.stream.phone;
-import javax.annotation.Nonnull;
-import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KStream;
-import org.radarcns.config.KafkaProperty;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.phone.PhoneUsageEvent;
+import org.radarcns.config.RadarPropertyHandler;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.phone.PhoneUsageEvent;
+import org.radarcns.stream.StreamDefinition;
import org.radarcns.stream.StreamMaster;
import org.radarcns.stream.StreamWorker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-public class PhoneUsageStream extends StreamWorker<MeasurementKey, PhoneUsageEvent> {
- private static final Logger log = LoggerFactory.getLogger(PhoneUsageStream.class);
+import javax.annotation.Nonnull;
+import java.util.Collection;
+
+public class PhoneUsageStream extends StreamWorker<ObservationKey, PhoneUsageEvent> {
+ private static final Logger logger = LoggerFactory.getLogger(PhoneUsageStream.class);
// 1 day until an item is refreshed
private static final int CACHE_TIMEOUT = 24 * 3600;
@@ -38,23 +40,23 @@ public class PhoneUsageStream extends StreamWorker definitions, int numThread,
+ StreamMaster master, RadarPropertyHandler properties) {
+ super(definitions, numThread, master, properties, logger);
+ playStoreLookup = new PlayStoreLookup(CACHE_TIMEOUT, MAX_CACHE_SIZE);
}
@Override
-    protected KStream<MeasurementKey, PhoneUsageEvent> defineStream(
-            @Nonnull KStream<MeasurementKey, PhoneUsageEvent> kstream) {
+    protected KStream<ObservationKey, PhoneUsageEvent> implementStream(StreamDefinition definition,
+            @Nonnull KStream<ObservationKey, PhoneUsageEvent> kstream) {
return kstream
- .map((key, value) -> {
+ .mapValues(value -> {
String packageName = value.getPackageName();
PlayStoreLookup.AppCategory category = playStoreLookup.lookupCategory(packageName);
+ logger.info("Looked up {}: {}", packageName, category.getCategoryName());
value.setCategoryName(category.getCategoryName());
value.setCategoryNameFetchTime(category.getFetchTimeStamp());
- return new KeyValue<>(key, value);
+ return value;
});
}
}
diff --git a/src/main/java/org/radarcns/stream/phone/TemporaryPackageKey.java b/src/main/java/org/radarcns/stream/phone/TemporaryPackageKey.java
index f3f7e0f1..cd76ebf4 100644
--- a/src/main/java/org/radarcns/stream/phone/TemporaryPackageKey.java
+++ b/src/main/java/org/radarcns/stream/phone/TemporaryPackageKey.java
@@ -13,8 +13,8 @@
@SuppressWarnings("all")
@org.apache.avro.specific.AvroGenerated
public class TemporaryPackageKey extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
- private static final long serialVersionUID = -8584100260725184052L;
- public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"TemporaryPackageKey\",\"namespace\":\"org.radarcns.stream.phone\",\"fields\":[{\"name\":\"key\",\"type\":{\"type\":\"record\",\"name\":\"MeasurementKey\",\"namespace\":\"org.radarcns.key\",\"doc\":\"Key of an observation.\",\"fields\":[{\"name\":\"userId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"doc\":\"User Identifier created during the enrolment.\"},{\"name\":\"sourceId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"doc\":\"Unique identifier associated with the source.\"}]},\"doc\":\"Observation key.\"},{\"name\":\"packageName\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"doc\":\"Package name.\"}]}");
+ private static final long serialVersionUID = 5825691401492875312L;
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"TemporaryPackageKey\",\"namespace\":\"org.radarcns.stream.phone\",\"fields\":[{\"name\":\"projectId\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}],\"doc\":\"Project ID.\"},{\"name\":\"userId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"doc\":\"User ID.\"},{\"name\":\"sourceId\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"doc\":\"Source ID.\"},{\"name\":\"packageName\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"doc\":\"Package name.\"}]}");
public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
private static SpecificData MODEL$ = new SpecificData();
@@ -51,8 +51,12 @@ public static TemporaryPackageKey fromByteBuffer(
return DECODER.decode(b);
}
- /** Observation key. */
- @Deprecated public org.radarcns.key.MeasurementKey key;
+ /** Project ID. */
+ @Deprecated public java.lang.String projectId;
+ /** User ID. */
+ @Deprecated public java.lang.String userId;
+ /** Source ID. */
+ @Deprecated public java.lang.String sourceId;
/** Package name. */
@Deprecated public java.lang.String packageName;
@@ -65,11 +69,15 @@ public TemporaryPackageKey() {}
/**
* All-args constructor.
- * @param key Observation key.
+ * @param projectId Project ID.
+ * @param userId User ID.
+ * @param sourceId Source ID.
* @param packageName Package name.
*/
- public TemporaryPackageKey(org.radarcns.key.MeasurementKey key, java.lang.String packageName) {
- this.key = key;
+ public TemporaryPackageKey(java.lang.String projectId, java.lang.String userId, java.lang.String sourceId, java.lang.String packageName) {
+ this.projectId = projectId;
+ this.userId = userId;
+ this.sourceId = sourceId;
this.packageName = packageName;
}
@@ -77,8 +85,10 @@ public TemporaryPackageKey(org.radarcns.key.MeasurementKey key, java.lang.String
// Used by DatumWriter. Applications should not call.
public java.lang.Object get(int field$) {
switch (field$) {
- case 0: return key;
- case 1: return packageName;
+ case 0: return projectId;
+ case 1: return userId;
+ case 2: return sourceId;
+ case 3: return packageName;
default: throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
@@ -87,27 +97,63 @@ public java.lang.Object get(int field$) {
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
- case 0: key = (org.radarcns.key.MeasurementKey)value$; break;
- case 1: packageName = (java.lang.String)value$; break;
+ case 0: projectId = (java.lang.String)value$; break;
+ case 1: userId = (java.lang.String)value$; break;
+ case 2: sourceId = (java.lang.String)value$; break;
+ case 3: packageName = (java.lang.String)value$; break;
default: throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
/**
- * Gets the value of the 'key' field.
- * @return Observation key.
+ * Gets the value of the 'projectId' field.
+ * @return Project ID.
*/
- public org.radarcns.key.MeasurementKey getKey() {
- return key;
+ public java.lang.String getProjectId() {
+ return projectId;
}
/**
- * Sets the value of the 'key' field.
- * Observation key.
+ * Sets the value of the 'projectId' field.
+ * Project ID.
* @param value the value to set.
*/
- public void setKey(org.radarcns.key.MeasurementKey value) {
- this.key = value;
+ public void setProjectId(java.lang.String value) {
+ this.projectId = value;
+ }
+
+ /**
+ * Gets the value of the 'userId' field.
+ * @return User ID.
+ */
+ public java.lang.String getUserId() {
+ return userId;
+ }
+
+ /**
+ * Sets the value of the 'userId' field.
+ * User ID.
+ * @param value the value to set.
+ */
+ public void setUserId(java.lang.String value) {
+ this.userId = value;
+ }
+
+ /**
+ * Gets the value of the 'sourceId' field.
+ * @return Source ID.
+ */
+ public java.lang.String getSourceId() {
+ return sourceId;
+ }
+
+ /**
+ * Sets the value of the 'sourceId' field.
+ * Source ID.
+ * @param value the value to set.
+ */
+ public void setSourceId(java.lang.String value) {
+ this.sourceId = value;
}
/**
@@ -159,9 +205,12 @@ public static org.radarcns.stream.phone.TemporaryPackageKey.Builder newBuilder(o
public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<TemporaryPackageKey>
    implements org.apache.avro.data.RecordBuilder<TemporaryPackageKey> {
- /** Observation key. */
- private org.radarcns.key.MeasurementKey key;
- private org.radarcns.key.MeasurementKey.Builder keyBuilder;
+ /** Project ID. */
+ private java.lang.String projectId;
+ /** User ID. */
+ private java.lang.String userId;
+ /** Source ID. */
+ private java.lang.String sourceId;
/** Package name. */
private java.lang.String packageName;
@@ -176,17 +225,22 @@ private Builder() {
*/
private Builder(org.radarcns.stream.phone.TemporaryPackageKey.Builder other) {
super(other);
- if (isValidValue(fields()[0], other.key)) {
- this.key = data().deepCopy(fields()[0].schema(), other.key);
+ if (isValidValue(fields()[0], other.projectId)) {
+ this.projectId = data().deepCopy(fields()[0].schema(), other.projectId);
fieldSetFlags()[0] = true;
}
- if (other.hasKeyBuilder()) {
- this.keyBuilder = org.radarcns.key.MeasurementKey.newBuilder(other.getKeyBuilder());
- }
- if (isValidValue(fields()[1], other.packageName)) {
- this.packageName = data().deepCopy(fields()[1].schema(), other.packageName);
+ if (isValidValue(fields()[1], other.userId)) {
+ this.userId = data().deepCopy(fields()[1].schema(), other.userId);
fieldSetFlags()[1] = true;
}
+ if (isValidValue(fields()[2], other.sourceId)) {
+ this.sourceId = data().deepCopy(fields()[2].schema(), other.sourceId);
+ fieldSetFlags()[2] = true;
+ }
+ if (isValidValue(fields()[3], other.packageName)) {
+ this.packageName = data().deepCopy(fields()[3].schema(), other.packageName);
+ fieldSetFlags()[3] = true;
+ }
}
/**
@@ -195,95 +249,150 @@ private Builder(org.radarcns.stream.phone.TemporaryPackageKey.Builder other) {
*/
private Builder(org.radarcns.stream.phone.TemporaryPackageKey other) {
super(SCHEMA$);
- if (isValidValue(fields()[0], other.key)) {
- this.key = data().deepCopy(fields()[0].schema(), other.key);
+ if (isValidValue(fields()[0], other.projectId)) {
+ this.projectId = data().deepCopy(fields()[0].schema(), other.projectId);
fieldSetFlags()[0] = true;
}
- this.keyBuilder = null;
- if (isValidValue(fields()[1], other.packageName)) {
- this.packageName = data().deepCopy(fields()[1].schema(), other.packageName);
+ if (isValidValue(fields()[1], other.userId)) {
+ this.userId = data().deepCopy(fields()[1].schema(), other.userId);
fieldSetFlags()[1] = true;
}
+ if (isValidValue(fields()[2], other.sourceId)) {
+ this.sourceId = data().deepCopy(fields()[2].schema(), other.sourceId);
+ fieldSetFlags()[2] = true;
+ }
+ if (isValidValue(fields()[3], other.packageName)) {
+ this.packageName = data().deepCopy(fields()[3].schema(), other.packageName);
+ fieldSetFlags()[3] = true;
+ }
}
/**
- * Gets the value of the 'key' field.
- * Observation key.
+ * Gets the value of the 'projectId' field.
+ * Project ID.
* @return The value.
*/
- public org.radarcns.key.MeasurementKey getKey() {
- return key;
+ public java.lang.String getProjectId() {
+ return projectId;
}
/**
- * Sets the value of the 'key' field.
- * Observation key.
- * @param value The value of 'key'.
+ * Sets the value of the 'projectId' field.
+ * Project ID.
+ * @param value The value of 'projectId'.
* @return This builder.
*/
- public org.radarcns.stream.phone.TemporaryPackageKey.Builder setKey(org.radarcns.key.MeasurementKey value) {
+ public org.radarcns.stream.phone.TemporaryPackageKey.Builder setProjectId(java.lang.String value) {
validate(fields()[0], value);
- this.keyBuilder = null;
- this.key = value;
+ this.projectId = value;
fieldSetFlags()[0] = true;
return this;
}
/**
- * Checks whether the 'key' field has been set.
- * Observation key.
- * @return True if the 'key' field has been set, false otherwise.
+ * Checks whether the 'projectId' field has been set.
+ * Project ID.
+ * @return True if the 'projectId' field has been set, false otherwise.
*/
- public boolean hasKey() {
+ public boolean hasProjectId() {
return fieldSetFlags()[0];
}
+
/**
- * Gets the Builder instance for the 'key' field and creates one if it doesn't exist yet.
- * Observation key.
- * @return This builder.
- */
- public org.radarcns.key.MeasurementKey.Builder getKeyBuilder() {
- if (keyBuilder == null) {
- if (hasKey()) {
- setKeyBuilder(org.radarcns.key.MeasurementKey.newBuilder(key));
- } else {
- setKeyBuilder(org.radarcns.key.MeasurementKey.newBuilder());
- }
- }
- return keyBuilder;
+ * Clears the value of the 'projectId' field.
+ * Project ID.
+ * @return This builder.
+ */
+ public org.radarcns.stream.phone.TemporaryPackageKey.Builder clearProjectId() {
+ projectId = null;
+ fieldSetFlags()[0] = false;
+ return this;
}
/**
- * Sets the Builder instance for the 'key' field
- * Observation key.
- * @param value The builder instance that must be set.
- * @return This builder.
- */
- public org.radarcns.stream.phone.TemporaryPackageKey.Builder setKeyBuilder(org.radarcns.key.MeasurementKey.Builder value) {
- clearKey();
- keyBuilder = value;
+ * Gets the value of the 'userId' field.
+ * User ID.
+ * @return The value.
+ */
+ public java.lang.String getUserId() {
+ return userId;
+ }
+
+ /**
+ * Sets the value of the 'userId' field.
+ * User ID.
+ * @param value The value of 'userId'.
+ * @return This builder.
+ */
+ public org.radarcns.stream.phone.TemporaryPackageKey.Builder setUserId(java.lang.String value) {
+ validate(fields()[1], value);
+ this.userId = value;
+ fieldSetFlags()[1] = true;
return this;
}
/**
- * Checks whether the 'key' field has an active Builder instance
- * Observation key.
- * @return True if the 'key' field has an active Builder instance
- */
- public boolean hasKeyBuilder() {
- return keyBuilder != null;
+ * Checks whether the 'userId' field has been set.
+ * User ID.
+ * @return True if the 'userId' field has been set, false otherwise.
+ */
+ public boolean hasUserId() {
+ return fieldSetFlags()[1];
}
+
/**
- * Clears the value of the 'key' field.
- * Observation key.
+ * Clears the value of the 'userId' field.
+ * User ID.
* @return This builder.
*/
- public org.radarcns.stream.phone.TemporaryPackageKey.Builder clearKey() {
- key = null;
- keyBuilder = null;
- fieldSetFlags()[0] = false;
+ public org.radarcns.stream.phone.TemporaryPackageKey.Builder clearUserId() {
+ userId = null;
+ fieldSetFlags()[1] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'sourceId' field.
+ * Source ID.
+ * @return The value.
+ */
+ public java.lang.String getSourceId() {
+ return sourceId;
+ }
+
+ /**
+ * Sets the value of the 'sourceId' field.
+ * Source ID.
+ * @param value The value of 'sourceId'.
+ * @return This builder.
+ */
+ public org.radarcns.stream.phone.TemporaryPackageKey.Builder setSourceId(java.lang.String value) {
+ validate(fields()[2], value);
+ this.sourceId = value;
+ fieldSetFlags()[2] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'sourceId' field has been set.
+ * Source ID.
+ * @return True if the 'sourceId' field has been set, false otherwise.
+ */
+ public boolean hasSourceId() {
+ return fieldSetFlags()[2];
+ }
+
+
+ /**
+ * Clears the value of the 'sourceId' field.
+ * Source ID.
+ * @return This builder.
+ */
+ public org.radarcns.stream.phone.TemporaryPackageKey.Builder clearSourceId() {
+ sourceId = null;
+ fieldSetFlags()[2] = false;
return this;
}
@@ -303,9 +412,9 @@ public java.lang.String getPackageName() {
* @return This builder.
*/
public org.radarcns.stream.phone.TemporaryPackageKey.Builder setPackageName(java.lang.String value) {
- validate(fields()[1], value);
+ validate(fields()[3], value);
this.packageName = value;
- fieldSetFlags()[1] = true;
+ fieldSetFlags()[3] = true;
return this;
}
@@ -315,7 +424,7 @@ public org.radarcns.stream.phone.TemporaryPackageKey.Builder setPackageName(java
* @return True if the 'packageName' field has been set, false otherwise.
*/
public boolean hasPackageName() {
- return fieldSetFlags()[1];
+ return fieldSetFlags()[3];
}
@@ -326,7 +435,7 @@ public boolean hasPackageName() {
*/
public org.radarcns.stream.phone.TemporaryPackageKey.Builder clearPackageName() {
packageName = null;
- fieldSetFlags()[1] = false;
+ fieldSetFlags()[3] = false;
return this;
}
@@ -335,12 +444,10 @@ public org.radarcns.stream.phone.TemporaryPackageKey.Builder clearPackageName()
public TemporaryPackageKey build() {
try {
TemporaryPackageKey record = new TemporaryPackageKey();
- if (keyBuilder != null) {
- record.key = this.keyBuilder.build();
- } else {
- record.key = fieldSetFlags()[0] ? this.key : (org.radarcns.key.MeasurementKey) defaultValue(fields()[0]);
- }
- record.packageName = fieldSetFlags()[1] ? this.packageName : (java.lang.String) defaultValue(fields()[1]);
+ record.projectId = fieldSetFlags()[0] ? this.projectId : (java.lang.String) defaultValue(fields()[0]);
+ record.userId = fieldSetFlags()[1] ? this.userId : (java.lang.String) defaultValue(fields()[1]);
+ record.sourceId = fieldSetFlags()[2] ? this.sourceId : (java.lang.String) defaultValue(fields()[2]);
+ record.packageName = fieldSetFlags()[3] ? this.packageName : (java.lang.String) defaultValue(fields()[3]);
return record;
} catch (java.lang.Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
diff --git a/src/main/java/org/radarcns/util/Comparison.java b/src/main/java/org/radarcns/util/Comparison.java
new file mode 100644
index 00000000..b185c045
--- /dev/null
+++ b/src/main/java/org/radarcns/util/Comparison.java
@@ -0,0 +1,20 @@
+package org.radarcns.util;
+
+import java.util.function.BiFunction;
+import java.util.function.Function;
+
+public interface Comparison<T> extends BiFunction<T, T, Integer> {
+    static <T, V extends Comparable<? super V>> Comparison<T> compare(Function<T, V> property) {
+        return (a, b) -> property.apply(a).compareTo(property.apply(b));
+    }
+
+    default <V extends Comparable<? super V>> Comparison<T> then(Function<T, V> property) {
+        return (a, b) -> {
+            int ret = apply(a, b);
+            if (ret != 0) {
+                return ret;
+            }
+            return compare(property).apply(a, b);
+        };
+    }
+}
diff --git a/src/main/java/org/radarcns/util/PersistentStateStore.java b/src/main/java/org/radarcns/util/PersistentStateStore.java
index 2070b5c5..f5a43aa3 100644
--- a/src/main/java/org/radarcns/util/PersistentStateStore.java
+++ b/src/main/java/org/radarcns/util/PersistentStateStore.java
@@ -20,7 +20,7 @@
import java.io.FileOutputStream;
import java.io.IOException;
import org.radarcns.config.YamlConfigLoader;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
/** Store a state for a Kafka consumer. */
public class PersistentStateStore {
@@ -94,10 +94,17 @@ private File getFile(String groupId, String clientId) {
* @param key key to serialize
* @return unique serialized form
*/
- public static String measurementKeyToString(MeasurementKey key) {
+ public static String measurementKeyToString(ObservationKey key) {
+ String projectId = key.getProjectId();
String userId = key.getUserId();
String sourceId = key.getSourceId();
- StringBuilder builder = new StringBuilder(userId.length() + 5 + sourceId.length());
+ StringBuilder builder = new StringBuilder(
+ (projectId == null ? 0 : projectId.length())
+ + userId.length() + 6 + sourceId.length());
+ if (projectId != null) {
+ escape(projectId, builder);
+ }
+ builder.append(SEPARATOR);
escape(userId, builder);
builder.append(SEPARATOR);
escape(sourceId, builder);
@@ -118,15 +125,16 @@ private static void escape(String string, StringBuilder builder) {
/**
* Efficiently serializes a measurement key serialized with
- * {@link #measurementKeyToString(MeasurementKey)}.
+ * {@link #measurementKeyToString(ObservationKey)}.
*
* @param string serialized form
* @return original measurement key
*/
- public static MeasurementKey stringToKey(String string) {
+ public static ObservationKey stringToKey(String string) {
StringBuilder builder = new StringBuilder(string.length());
- MeasurementKey key = new MeasurementKey();
+ ObservationKey key = new ObservationKey();
boolean hasSlash = false;
+ int numFound = 0;
for (char c : string.toCharArray()) {
if (c == '\\') {
if (hasSlash) {
@@ -140,8 +148,18 @@ public static MeasurementKey stringToKey(String string) {
builder.append(c);
hasSlash = false;
} else {
- key.setUserId(builder.toString());
- builder.setLength(0);
+ if (numFound == 0) {
+ numFound++;
+ if (builder.length() == 0) {
+ key.setProjectId(null);
+ } else {
+ key.setProjectId(builder.toString());
+ builder.setLength(0);
+ }
+ } else {
+ key.setUserId(builder.toString());
+ builder.setLength(0);
+ }
}
} else {
builder.append(c);
diff --git a/src/main/java/org/radarcns/util/RadarUtilities.java b/src/main/java/org/radarcns/util/RadarUtilities.java
index e7727b87..884aa5af 100644
--- a/src/main/java/org/radarcns/util/RadarUtilities.java
+++ b/src/main/java/org/radarcns/util/RadarUtilities.java
@@ -18,12 +18,12 @@
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Windowed;
-import org.radarcns.aggregator.DoubleAggregator;
-import org.radarcns.aggregator.DoubleArrayAggregator;
import org.radarcns.aggregator.PhoneUsageAggregator;
-import org.radarcns.empatica.EmpaticaE4Acceleration;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.passive.empatica.EmpaticaE4Acceleration;
+import org.radarcns.stream.aggregator.DoubleAggregation;
+import org.radarcns.stream.aggregator.DoubleArrayAggregation;
import org.radarcns.stream.collector.DoubleArrayCollector;
import org.radarcns.stream.collector.DoubleValueCollector;
import org.radarcns.stream.phone.PhoneUsageCollector;
@@ -35,21 +35,21 @@
public interface RadarUtilities {
/**
- * Creates a WindowedKey for a window of MeasurementKey.
+ * Creates a AggregateKey for a window of ObservationKey.
* @param window Windowed measurement keys
- * @return relevant WindowedKey
+ * @return relevant AggregateKey
*/
-    WindowedKey getWindowed(Windowed<MeasurementKey> window);
+    AggregateKey getWindowed(Windowed<ObservationKey> window);
-    WindowedKey getWindowedTuple(Windowed<TemporaryPackageKey> window);
+    AggregateKey getWindowedTuple(Windowed<TemporaryPackageKey> window);
-    KeyValue<WindowedKey, DoubleArrayAggregator> collectorToAvro(
-            Windowed<MeasurementKey> window, DoubleArrayCollector collector);
+    KeyValue<AggregateKey, DoubleArrayAggregation> collectorToAvro(
+            Windowed<ObservationKey> window, DoubleArrayCollector collector);
-    KeyValue<WindowedKey, DoubleAggregator> collectorToAvro(
-            Windowed<MeasurementKey> window, DoubleValueCollector collector);
+    KeyValue<AggregateKey, DoubleAggregation> collectorToAvro(
+            Windowed<ObservationKey> window, DoubleValueCollector collector);
-    KeyValue<WindowedKey, PhoneUsageAggregator> collectorToAvro(
+    KeyValue<AggregateKey, PhoneUsageAggregator> collectorToAvro(
        Windowed<TemporaryPackageKey> window, PhoneUsageCollector collector);
double floatToDouble(float input);
diff --git a/src/main/java/org/radarcns/util/RadarUtilitiesImpl.java b/src/main/java/org/radarcns/util/RadarUtilitiesImpl.java
index 81461fd2..b5fa11c4 100644
--- a/src/main/java/org/radarcns/util/RadarUtilitiesImpl.java
+++ b/src/main/java/org/radarcns/util/RadarUtilitiesImpl.java
@@ -18,12 +18,12 @@
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Windowed;
-import org.radarcns.aggregator.DoubleAggregator;
-import org.radarcns.aggregator.DoubleArrayAggregator;
import org.radarcns.aggregator.PhoneUsageAggregator;
-import org.radarcns.empatica.EmpaticaE4Acceleration;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.passive.empatica.EmpaticaE4Acceleration;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.kafka.AggregateKey;
+import org.radarcns.stream.aggregator.DoubleAggregation;
+import org.radarcns.stream.aggregator.DoubleArrayAggregation;
import org.radarcns.stream.collector.DoubleArrayCollector;
import org.radarcns.stream.collector.DoubleValueCollector;
import org.radarcns.stream.phone.PhoneUsageCollector;
@@ -41,16 +41,18 @@ protected RadarUtilitiesImpl() {
}
@Override
-    public WindowedKey getWindowed(Windowed<MeasurementKey> window) {
- return new WindowedKey(window.key().getUserId(), window.key().getSourceId(),
- window.window().start(), window.window().end());
+    public AggregateKey getWindowed(Windowed<ObservationKey> window) {
+ return new AggregateKey(window.key().getProjectId(), window.key().getUserId(),
+ window.key().getSourceId(), window.window().start(), window.window().end());
}
@Override
-    public WindowedKey getWindowedTuple(Windowed<TemporaryPackageKey> window) {
- MeasurementKey measurementKey = window.key().getKey();
- return new WindowedKey(measurementKey.getUserId(), measurementKey.getSourceId(),
- window.window().start(), window.window().end());
+    public AggregateKey getWindowedTuple(Windowed<TemporaryPackageKey> window) {
+ TemporaryPackageKey temp = window.key();
+ ObservationKey measurementKey = new ObservationKey(temp.getProjectId(), temp.getUserId(),
+ temp.getSourceId());
+ return new AggregateKey(measurementKey.getProjectId(), measurementKey.getUserId(),
+ measurementKey.getSourceId(), window.window().start(), window.window().end());
}
@Override
@@ -60,7 +62,7 @@ public double floatToDouble(float input) {
@Override
-    public KeyValue<WindowedKey, PhoneUsageAggregator> collectorToAvro(
+    public KeyValue<AggregateKey, PhoneUsageAggregator> collectorToAvro(
Windowed window, PhoneUsageCollector collector
) {
return new KeyValue<>(getWindowedTuple(window) , new PhoneUsageAggregator(
@@ -73,8 +75,8 @@ public KeyValue collectorToAvro(
}
@Override
-    public KeyValue<WindowedKey, DoubleArrayAggregator> collectorToAvro(
-            Windowed<MeasurementKey> window, DoubleArrayCollector collector) {
+    public KeyValue<AggregateKey, DoubleArrayAggregation> collectorToAvro(
+            Windowed<ObservationKey> window, DoubleArrayCollector collector) {
List<DoubleValueCollector> subcollectors = collector.getCollectors();
int len = subcollectors.size();
List<Double> min = new ArrayList<>(len);
@@ -96,14 +98,14 @@ public KeyValue collectorToAvro(
}
return new KeyValue<>(getWindowed(window),
- new DoubleArrayAggregator(min, max, sum, count, avg, quartile, iqr));
+ new DoubleArrayAggregation(min, max, sum, count, avg, quartile, iqr));
}
@Override
-    public KeyValue<WindowedKey, DoubleAggregator> collectorToAvro(
-            Windowed<MeasurementKey> window, DoubleValueCollector collector) {
+    public KeyValue<AggregateKey, DoubleAggregation> collectorToAvro(
+            Windowed<ObservationKey> window, DoubleValueCollector collector) {
return new KeyValue<>(getWindowed(window),
- new DoubleAggregator(collector.getMin(), collector.getMax(), collector.getSum(),
+ new DoubleAggregation(collector.getMin(), collector.getMax(), collector.getSum(),
collector.getCount(), collector.getAvg(), collector.getQuartile(),
collector.getIqr()));
}
diff --git a/src/main/java/org/radarcns/util/StreamUtil.java b/src/main/java/org/radarcns/util/StreamUtil.java
new file mode 100644
index 00000000..ea982a22
--- /dev/null
+++ b/src/main/java/org/radarcns/util/StreamUtil.java
@@ -0,0 +1,45 @@
+package org.radarcns.util;
+
+import org.apache.kafka.streams.KeyValue;
+
+import java.util.function.BiFunction;
+import java.util.function.BiPredicate;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.stream.Stream;
+
+public final class StreamUtil {
+ private StreamUtil() {
+ // utility class
+ }
+
+    public static <K, V> Predicate<KeyValue<K, V>> test(BiPredicate<? super K, ? super V> bip) {
+        return entry -> bip.test(entry.key, entry.value);
+    }
+
+    public static <K, V, R> Function<KeyValue<K, V>, R> apply(
+            BiFunction<? super K, ? super V, R> bif) {
+        return entry -> bif.apply(entry.key, entry.value);
+    }
+
+    public static <K, V> Function<KeyValue<K, V>, K> first() {
+        return e -> e.key;
+    }
+
+    public static <K, V> Function<KeyValue<K, V>, V> second() {
+        return e -> e.value;
+    }
+
+    @FunctionalInterface
+    public interface StreamSupplier<T> {
+        Stream<T> get();
+
+        default StreamSupplier<T> concat(StreamSupplier<? extends T> other) {
+            return () -> Stream.concat(get(), other.get());
+        }
+
+        static <T> StreamSupplier<T> supply(StreamSupplier<T> supplier) {
+            return supplier;
+        }
+    }
+}
diff --git a/src/main/java/org/radarcns/util/serde/GenericAvroDeserializer.java b/src/main/java/org/radarcns/util/serde/GenericAvroDeserializer.java
deleted file mode 100644
index 3e8208a5..00000000
--- a/src/main/java/org/radarcns/util/serde/GenericAvroDeserializer.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2017 King's College London and The Hyve
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.radarcns.util.serde;
-
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.kafka.common.serialization.Deserializer;
-
-import java.util.Map;
-
-public class GenericAvroDeserializer implements Deserializer<GenericRecord> {
-
- private final KafkaAvroDeserializer inner;
-
- /**
- * Constructor used by Kafka Streams.
- */
- public GenericAvroDeserializer() {
- inner = new KafkaAvroDeserializer();
- }
-
- @Override
-    public void configure(Map<String, ?> configs, boolean isKey) {
- inner.configure(configs, isKey);
- }
-
- @Override
- public GenericRecord deserialize(String s, byte[] bytes) {
- return (GenericRecord) inner.deserialize(s, bytes);
- }
-
- @Override
- public void close() {
- inner.close();
- }
-}
diff --git a/src/main/java/org/radarcns/util/serde/GenericAvroSerde.java b/src/main/java/org/radarcns/util/serde/GenericAvroSerde.java
deleted file mode 100644
index 0a5ce631..00000000
--- a/src/main/java/org/radarcns/util/serde/GenericAvroSerde.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2017 King's College London and The Hyve
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.radarcns.util.serde;
-
-import java.util.Map;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.kafka.common.serialization.Deserializer;
-import org.apache.kafka.common.serialization.Serde;
-import org.apache.kafka.common.serialization.Serdes;
-import org.apache.kafka.common.serialization.Serializer;
-
-/**
- * Created by Francesco Nobilia on 12/10/2016.
- */
-public class GenericAvroSerde implements Serde<GenericRecord> {
-
-    private final Serde<GenericRecord> inner;
-
- /**
- * Constructor used by Kafka Streams.
- */
- public GenericAvroSerde() {
- inner = Serdes.serdeFrom(new GenericAvroSerializer(), new GenericAvroDeserializer());
- }
-
- @Override
- public Serializer serializer() {
- return inner.serializer();
- }
-
- @Override
- public Deserializer deserializer() {
- return inner.deserializer();
- }
-
- @Override
-    public void configure(Map<String, ?> configs, boolean isKey) {
- inner.serializer().configure(configs, isKey);
- inner.deserializer().configure(configs, isKey);
- }
-
- @Override
- public void close() {
- inner.serializer().close();
- inner.deserializer().close();
- }
-}
diff --git a/src/main/java/org/radarcns/util/serde/GenericAvroSerializer.java b/src/main/java/org/radarcns/util/serde/GenericAvroSerializer.java
deleted file mode 100644
index b0e0e97a..00000000
--- a/src/main/java/org/radarcns/util/serde/GenericAvroSerializer.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2017 King's College London and The Hyve
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.radarcns.util.serde;
-
-/**
- * Created by Francesco Nobilia on 12/10/2016.
- */
-
-import io.confluent.kafka.serializers.KafkaAvroSerializer;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.kafka.common.serialization.Serializer;
-
-import java.util.Map;
-
-
-public class GenericAvroSerializer implements Serializer<GenericRecord> {
-
- private final KafkaAvroSerializer inner;
-
- /**
- * Constructor used by Kafka Streams.
- */
- public GenericAvroSerializer() {
- inner = new KafkaAvroSerializer();
- }
-
- @Override
-    public void configure(Map<String, ?> configs, boolean isKey) {
- inner.configure(configs, isKey);
- }
-
- @Override
- public byte[] serialize(String topic, GenericRecord record) {
- return inner.serialize(topic, record);
- }
-
- @Override
- public void close() {
- inner.close();
- }
-}
diff --git a/src/main/java/org/radarcns/util/serde/SpecificAvroDeserializer.java b/src/main/java/org/radarcns/util/serde/SpecificAvroDeserializer.java
deleted file mode 100644
index 44740035..00000000
--- a/src/main/java/org/radarcns/util/serde/SpecificAvroDeserializer.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2017 King's College London and The Hyve
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.radarcns.util.serde;
-
-import static io.confluent.kafka.serializers.KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG;
-
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
-import java.util.HashMap;
-import java.util.Map;
-import org.apache.avro.specific.SpecificRecord;
-import org.apache.kafka.common.serialization.Deserializer;
-
-public class SpecificAvroDeserializer implements Deserializer {
- private final KafkaAvroDeserializer inner;
-
- /**
- * Constructor used by Kafka Streams.
- */
- public SpecificAvroDeserializer() {
- inner = new KafkaAvroDeserializer();
- }
-
- @Override
- public void configure(Map configs, boolean isKey) {
- Map effectiveConfigs = new HashMap<>(configs);
- effectiveConfigs.put(SPECIFIC_AVRO_READER_CONFIG, true);
- inner.configure(effectiveConfigs, isKey);
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public T deserialize(String s, byte[] bytes) {
- return (T) inner.deserialize(s, bytes);
- }
-
- @Override
- public void close() {
- inner.close();
- }
-}
diff --git a/src/main/java/org/radarcns/util/serde/SpecificAvroSerde.java b/src/main/java/org/radarcns/util/serde/SpecificAvroSerde.java
deleted file mode 100644
index 8377e8a9..00000000
--- a/src/main/java/org/radarcns/util/serde/SpecificAvroSerde.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2017 King's College London and The Hyve
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.radarcns.util.serde;
-
-import java.util.Map;
-import org.apache.avro.specific.SpecificRecord;
-import org.apache.kafka.common.serialization.Deserializer;
-import org.apache.kafka.common.serialization.Serde;
-import org.apache.kafka.common.serialization.Serdes;
-import org.apache.kafka.common.serialization.Serializer;
-
-public class SpecificAvroSerde implements Serde {
- private final Serde inner;
-
- /**
- * Constructor used by Kafka Streams.
- */
- public SpecificAvroSerde() {
- inner = Serdes.serdeFrom(new SpecificAvroSerializer<>(), new SpecificAvroDeserializer<>());
- }
-
- @Override
- public Serializer serializer() {
- return inner.serializer();
- }
-
- @Override
- public Deserializer deserializer() {
- return inner.deserializer();
- }
-
- @Override
- public void configure(Map configs, boolean isKey) {
- inner.serializer().configure(configs, isKey);
- inner.deserializer().configure(configs, isKey);
- }
-
- @Override
- public void close() {
- inner.serializer().close();
- inner.deserializer().close();
- }
-}
\ No newline at end of file
diff --git a/src/main/java/org/radarcns/util/serde/SpecificAvroSerializer.java b/src/main/java/org/radarcns/util/serde/SpecificAvroSerializer.java
deleted file mode 100644
index 6a4ea11d..00000000
--- a/src/main/java/org/radarcns/util/serde/SpecificAvroSerializer.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2017 King's College London and The Hyve
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.radarcns.util.serde;
-
-import static io.confluent.kafka.serializers.KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG;
-
-import io.confluent.kafka.serializers.KafkaAvroSerializer;
-import java.util.HashMap;
-import java.util.Map;
-import org.apache.avro.specific.SpecificRecord;
-import org.apache.kafka.common.serialization.Serializer;
-
-public class SpecificAvroSerializer implements Serializer {
-
- private final KafkaAvroSerializer inner;
-
- /**
- * Constructor used by Kafka Streams.
- */
- public SpecificAvroSerializer() {
- inner = new KafkaAvroSerializer();
- }
-
- @Override
- @SuppressWarnings("unchecked")
- public void configure(Map configs, boolean isKey) {
- Map effectiveConfigs = new HashMap<>(configs);
- effectiveConfigs.put(SPECIFIC_AVRO_READER_CONFIG, true);
- inner.configure(effectiveConfigs, isKey);
- }
-
- @Override
- public byte[] serialize(String topic, T record) {
- return inner.serialize(topic, record);
- }
-
- @Override
- public void close() {
- inner.close();
- }
-}
diff --git a/src/main/resources/build.properties b/src/main/resources/build.properties
new file mode 100644
index 00000000..75ac2d20
--- /dev/null
+++ b/src/main/resources/build.properties
@@ -0,0 +1 @@
+version=${version}
\ No newline at end of file
diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties
index db6aa12b..1c05bf0a 100644
--- a/src/main/resources/log4j.properties
+++ b/src/main/resources/log4j.properties
@@ -15,15 +15,11 @@
#
# Root logger option
-log4j.rootLogger=INFO, stdout, file
+log4j.rootLogger=INFO, stdout
-log4j.logger.org.radarcns.RadarBackend=INFO
-
-log4j.logger.org.radarcns.stream.StreamMaster=INFO
-
-log4j.logger.org.radarcns.stream.aggregator.SensorAggregator=INFO
-log4j.logger.org.radarcns.stream.aggregator.InternalAggregator=INFO
-log4j.logger.org.radarcns.process.AbstractKafkaMonitor=INFO
+log4j.logger.org.apache.kafka.streams.processor.internals=WARN
+log4j.logger.org.apache.kafka.clients.consumer.internals=WARN
+log4j.logger.org.apache.kafka.clients.producer.internals=WARN
# Redirect log messages to console
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
@@ -31,11 +27,3 @@ log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d{yyyy-MM-dd HH:mm:ss zzz}] %5p [%t] (%F:%L) - %m (%c)%n
#log4j.appender.stdout.layout.ConversionPattern=[%d{yyyy-MM-dd HH:mm:ss zzz}] %-5p %t %C.%M:%L - %m%n
-
-# Redirect log messages to a log file, support file rolling.
-log4j.appender.file=org.apache.log4j.RollingFileAppender
-log4j.appender.file.File=./backend.log
-log4j.appender.file.MaxFileSize=10MB
-log4j.appender.file.MaxBackupIndex=10
-log4j.appender.file.layout=org.apache.log4j.PatternLayout
-log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH:mm:ss zzz}] %-5p %t %c{1}:%L - %m%n
diff --git a/src/test/java/org/radarcns/monitor/BatteryLevelMonitorTest.java b/src/test/java/org/radarcns/monitor/BatteryLevelMonitorTest.java
index 6f2cc21c..4a3bef78 100644
--- a/src/test/java/org/radarcns/monitor/BatteryLevelMonitorTest.java
+++ b/src/test/java/org/radarcns/monitor/BatteryLevelMonitorTest.java
@@ -39,7 +39,7 @@
import org.junit.rules.TemporaryFolder;
import org.radarcns.config.ConfigRadar;
import org.radarcns.config.RadarPropertyHandler;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
import org.radarcns.monitor.BatteryLevelMonitor.BatteryLevelState;
import org.radarcns.util.EmailSender;
import org.radarcns.util.PersistentStateStore;
@@ -60,6 +60,7 @@ public class BatteryLevelMonitorTest {
public void setUp() {
Parser parser = new Parser();
keySchema = parser.parse("{\"name\": \"key\", \"type\": \"record\", \"fields\": ["
+ + "{\"name\": \"projectId\", \"type\": [\"null\", \"string\"]},"
+ "{\"name\": \"userId\", \"type\": \"string\"},"
+ "{\"name\": \"sourceId\", \"type\": \"string\"}"
+ "]}");
@@ -102,6 +103,7 @@ public void evaluateRecord() throws Exception {
private void sendMessage(BatteryLevelMonitor monitor, float batteryLevel, boolean sentMessage)
throws MessagingException {
Record key = new Record(keySchema);
+ key.put("projectId", "test");
key.put("sourceId", "1");
key.put("userId", "me");
@@ -121,7 +123,7 @@ public void retrieveState() throws Exception {
File base = folder.newFolder();
PersistentStateStore stateStore = new PersistentStateStore(base);
BatteryLevelState state = new BatteryLevelState();
- MeasurementKey key1 = new MeasurementKey("a", "b");
+ ObservationKey key1 = new ObservationKey("test", "a", "b");
state.updateLevel(key1, 0.1f);
stateStore.storeState("one", "two", state);
diff --git a/src/test/java/org/radarcns/monitor/DisconnectMonitorTest.java b/src/test/java/org/radarcns/monitor/DisconnectMonitorTest.java
index f85b8f99..8dcb7770 100644
--- a/src/test/java/org/radarcns/monitor/DisconnectMonitorTest.java
+++ b/src/test/java/org/radarcns/monitor/DisconnectMonitorTest.java
@@ -30,7 +30,7 @@
import org.radarcns.config.ConfigRadar;
import org.radarcns.config.DisconnectMonitorConfig;
import org.radarcns.config.RadarPropertyHandler;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
import org.radarcns.monitor.DisconnectMonitor.DisconnectMonitorState;
import org.radarcns.monitor.DisconnectMonitor.MissingRecordsReport;
import org.radarcns.util.EmailSender;
@@ -68,6 +68,7 @@ public class DisconnectMonitorTest {
public void setUp() {
Parser parser = new Parser();
keySchema = parser.parse("{\"name\": \"key\", \"type\": \"record\", \"fields\": ["
+ + "{\"name\": \"projectId\", \"type\": [\"null\", \"string\"]},"
+ "{\"name\": \"userId\", \"type\": \"string\"},"
+ "{\"name\": \"sourceId\", \"type\": \"string\"}"
+ "]}");
@@ -135,6 +136,7 @@ public void evaluateRecordsWithScheduledAlerts() throws Exception {
private void sendMessage(DisconnectMonitor monitor, String source, int sentMessages)
throws MessagingException {
Record key = new Record(keySchema);
+ key.put("projectId", "test");
key.put("sourceId", source);
key.put("userId", "me");
@@ -154,9 +156,9 @@ public void retrieveState() throws Exception {
File base = folder.newFolder();
PersistentStateStore stateStore = new PersistentStateStore(base);
DisconnectMonitorState state = new DisconnectMonitorState();
- MeasurementKey key1 = new MeasurementKey("a", "b");
- MeasurementKey key2 = new MeasurementKey("b", "c");
- MeasurementKey key3 = new MeasurementKey("c", "d");
+ ObservationKey key1 = new ObservationKey("test", "a", "b");
+ ObservationKey key2 = new ObservationKey("test", "b", "c");
+ ObservationKey key3 = new ObservationKey("test", "c", "d");
long now = System.currentTimeMillis();
state.getLastSeen().put(measurementKeyToString(key1), now);
state.getLastSeen().put(measurementKeyToString(key2), now + 1L);
diff --git a/src/test/java/org/radarcns/stream/StreamDefinitionTest.java b/src/test/java/org/radarcns/stream/StreamDefinitionTest.java
index 4979ff1b..1486162d 100644
--- a/src/test/java/org/radarcns/stream/StreamDefinitionTest.java
+++ b/src/test/java/org/radarcns/stream/StreamDefinitionTest.java
@@ -1,5 +1,3 @@
-package org.radarcns.stream;
-
/*
* Copyright 2017 King's College London and The Hyve
*
@@ -16,13 +14,19 @@
* limitations under the License.
*/
+package org.radarcns.stream;
+
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
-import org.apache.kafka.common.errors.InvalidTopicException;
import org.junit.Test;
import org.radarcns.topic.KafkaTopic;
+import java.util.regex.Pattern;
+
public class StreamDefinitionTest {
+ private static final Pattern TOPIC_PATTERN = Pattern.compile("^[A-Za-z0-9_-]+$");
private static final String INPUT = "android_empatica_e4_blood_volume_pulse";
private static final String OUTPUT = INPUT + GeneralStreamGroup.OUTPUT_LABEL;
@@ -34,10 +38,9 @@ public void nameValidation() {
StreamDefinition definition = new StreamDefinition(inputTopic, outputTopic);
- kafka.common.Topic.validate(definition.getStateStoreName());
-
- assertEquals("From-" + "android_empatica_e4_blood_volume_pulse" + "-To-" +
- "android_empatica_e4_blood_volume_pulse" + "_output",
+ assertTrue(TOPIC_PATTERN.matcher(definition.getStateStoreName()).matches());
+ assertEquals("From-android_empatica_e4_blood_volume_pulse"
+ + "-To-android_empatica_e4_blood_volume_pulse_output",
definition.getStateStoreName());
}
@@ -47,7 +50,6 @@ public void faultyNameValidation() {
KafkaTopic outputTopic = new KafkaTopic(OUTPUT);
StreamDefinition definition = new StreamDefinition(inputTopic, outputTopic);
-
- kafka.common.Topic.validate(definition.getStateStoreName());
+ assertFalse(TOPIC_PATTERN.matcher(definition.getStateStoreName()).matches());
}
}
diff --git a/src/test/java/org/radarcns/stream/StreamWorkerTest.java b/src/test/java/org/radarcns/stream/StreamWorkerTest.java
index d1611415..636a853a 100644
--- a/src/test/java/org/radarcns/stream/StreamWorkerTest.java
+++ b/src/test/java/org/radarcns/stream/StreamWorkerTest.java
@@ -16,6 +16,7 @@
package org.radarcns.stream;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.mock;
@@ -24,10 +25,18 @@
import static org.mockito.Mockito.when;
import java.io.IOException;
+import java.util.Collections;
+import java.util.Properties;
+
import org.apache.kafka.streams.kstream.KStream;
import org.junit.Before;
import org.junit.Test;
+import org.radarcns.config.ConfigRadar;
+import org.radarcns.config.KafkaProperty;
+import org.radarcns.config.RadarPropertyHandler;
import org.radarcns.topic.KafkaTopic;
+import org.radarcns.util.RadarSingletonFactory;
+
/**
* Created by nivethika on 20-12-16.
*/
@@ -43,11 +52,17 @@ public void setUp() {
public void getBuilder() throws IOException {
String topicName = "TESTTopic";
StreamDefinition sensorTopic = new StreamDefinition(new KafkaTopic(topicName), new KafkaTopic(topicName + "_output"));
- when(aggregator.getStreamDefinition()).thenReturn(sensorTopic);
- when(aggregator.defineStream(any())).thenReturn(mock(KStream.class));
- doCallRealMethod().when(aggregator).createBuilder();
- aggregator.createBuilder();
+ when(aggregator.getStreamDefinitions()).thenReturn(Collections.singleton(sensorTopic));
+
+ RadarPropertyHandler propertyHandler = RadarSingletonFactory.getRadarPropertyHandler();
+ propertyHandler.load("src/test/resources/config/radar.yml");
+ KafkaProperty kafkaProperty = propertyHandler.getKafkaProperties();
+ when(aggregator.getStreamProperties(eq(sensorTopic))).thenReturn(
+ kafkaProperty.getStreamProperties("test", 1, DeviceTimestampExtractor.class));
+ when(aggregator.implementStream(eq(sensorTopic), any())).thenReturn(mock(KStream.class));
+ doCallRealMethod().when(aggregator).createBuilder(sensorTopic);
+ aggregator.createBuilder(sensorTopic);
- verify(aggregator, times(1)).defineStream(any());
+ verify(aggregator, times(1)).implementStream(eq(sensorTopic), any());
}
}
diff --git a/src/test/java/org/radarcns/stream/empatica/E4AccelerationTest.java.bak b/src/test/java/org/radarcns/stream/empatica/E4AccelerationTest.java.bak
index 38690470..d91d783d 100644
--- a/src/test/java/org/radarcns/stream/empatica/E4AccelerationTest.java.bak
+++ b/src/test/java/org/radarcns/stream/empatica/E4AccelerationTest.java.bak
@@ -1,4 +1,4 @@
-package org.radarcns.empatica.streams;
+package org.radarcns.passive.empatica.streams;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KStreamBuilder;
@@ -10,10 +10,10 @@ import org.mockito.Mockito;
import org.mortbay.util.IO;
import org.radarcns.config.ConfigRadar;
import org.radarcns.config.PropertiesRadar;
-import org.radarcns.empatica.EmpaticaE4Acceleration;
-import org.radarcns.empatica.topic.E4SensorStreams;
+import org.radarcns.passive.empatica.EmpaticaE4Acceleration;
+import org.radarcns.passive.empatica.topic.E4SensorStreams;
import E4Streams;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
import org.radarcns.stream.StreamWorker;
import org.radarcns.stream.StreamMaster;
import org.radarcns.topic.SensorTopic;
@@ -49,7 +49,7 @@ public class E4AccelerationTest {
when(PropertiesRadar.getInstance()).thenReturn(propertiesRadar);
KStreamBuilder streamBuilder = new KStreamBuilder();
- KStream stream = streamBuilder.stream("E4Acceleration");
+ KStream stream = streamBuilder.stream("E4Acceleration");
SensorTopic sensorTopic = E4Streams.getInstance().getSensorTopics().getAccelerationStream();
e4Acceleration.setStream(stream, sensorTopic);
diff --git a/src/test/java/org/radarcns/stream/empatica/E4StreamsTest.java b/src/test/java/org/radarcns/stream/empatica/E4StreamsTest.java
index f771cb3a..669667dd 100644
--- a/src/test/java/org/radarcns/stream/empatica/E4StreamsTest.java
+++ b/src/test/java/org/radarcns/stream/empatica/E4StreamsTest.java
@@ -19,12 +19,14 @@
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
+import java.util.Collection;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.radarcns.stream.StreamDefinition;
+import org.radarcns.stream.TimeWindowMetadata;
import org.radarcns.stream.empatica.E4Streams;
/**
@@ -49,32 +51,33 @@ public void getTopic() {
"android_empatica_e4_blood_volume_pulse",
"android_empatica_e4_electrodermal_activity",
"android_empatica_e4_inter_beat_interval",
- "android_empatica_e4_sensor_status",
"android_empatica_e4_temperature");
for(String topicName : topicNames) {
- StreamDefinition topic = this.e4Streams.getStreamDefinition(topicName);
- assertEquals(topic.getInputTopic().getName(), topicName);
+ Collection topic = this.e4Streams.getStreamDefinition(topicName);
+ assertEquals(topic.iterator().next().getInputTopic().getName(), topicName);
}
}
@Test
public void getInternalTopic() {
- StreamDefinition topic = this.e4Streams.getStreamDefinition("android_empatica_e4_inter_beat_interval");
+ StreamDefinition topic = this.e4Streams.getStreamDefinition("android_empatica_e4_inter_beat_interval").iterator().next();
assertEquals("android_empatica_e4_inter_beat_interval", topic.getInputTopic().getName());
- assertEquals("android_empatica_e4_heartrate", topic.getOutputTopic().getName());
- assertEquals("From-android_empatica_e4_inter_beat_interval-To-android_empatica_e4_heartrate", topic.getStateStoreName());
+ assertEquals("android_empatica_e4_heart_rate_10min", topic.getOutputTopic().getName());
+ assertEquals("From-android_empatica_e4_inter_beat_interval-To-android_empatica_e4_heart_rate_10min", topic.getStateStoreName());
}
@Test
public void getInvalidTopic() {
exception.expect(IllegalArgumentException.class);
exception.expectMessage("Topic something unknown");
- StreamDefinition topic = this.e4Streams.getStreamDefinition("something");
+ this.e4Streams.getStreamDefinition("something");
}
@Test
public void getTopicNames() {
- assertEquals(15, this.e4Streams.getTopicNames().size()); // sort removes the redundant
+ System.out.println(this.e4Streams.getTopicNames());
+ assertEquals(8 * TimeWindowMetadata.values().length,
+ this.e4Streams.getTopicNames().size()); // sort removes the redundant
}
}
diff --git a/src/test/java/org/radarcns/util/PersistentStateStoreTest.java b/src/test/java/org/radarcns/util/PersistentStateStoreTest.java
index 71a8fd6b..b222ce19 100644
--- a/src/test/java/org/radarcns/util/PersistentStateStoreTest.java
+++ b/src/test/java/org/radarcns/util/PersistentStateStoreTest.java
@@ -28,7 +28,7 @@
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
import org.radarcns.monitor.BatteryLevelMonitor.BatteryLevelState;
public class PersistentStateStoreTest {
@@ -40,14 +40,14 @@ public void retrieveState() throws Exception {
File base = folder.newFolder();
PersistentStateStore stateStore = new PersistentStateStore(base);
BatteryLevelState state = new BatteryLevelState();
- MeasurementKey key1 = new MeasurementKey("a", "b");
+ ObservationKey key1 = new ObservationKey("test", "a", "b");
state.updateLevel(key1, 0.1f);
stateStore.storeState("one", "two", state);
File outputFile = new File(base, "one_two.yml");
assertThat(outputFile.exists(), is(true));
String rawFile = new String(Files.readAllBytes(outputFile.toPath()));
- assertThat(rawFile, equalTo("---\nlevels:\n a#b: 0.1\n"));
+ assertThat(rawFile, equalTo("---\nlevels:\n test#a#b: 0.1\n"));
PersistentStateStore stateStore2 = new PersistentStateStore(base);
BatteryLevelState state2 = stateStore2.retrieveState("one", "two", new BatteryLevelState());
diff --git a/src/test/java/org/radarcns/util/RadarUtilsTest.java b/src/test/java/org/radarcns/util/RadarUtilsTest.java
index f9dd4190..fe8bad7b 100644
--- a/src/test/java/org/radarcns/util/RadarUtilsTest.java
+++ b/src/test/java/org/radarcns/util/RadarUtilsTest.java
@@ -23,9 +23,9 @@
import org.apache.kafka.streams.kstream.internals.TimeWindow;
import org.junit.Before;
import org.junit.Test;
-import org.radarcns.empatica.EmpaticaE4Acceleration;
-import org.radarcns.key.MeasurementKey;
-import org.radarcns.key.WindowedKey;
+import org.radarcns.passive.empatica.EmpaticaE4Acceleration;
+import org.radarcns.kafka.ObservationKey;
+import org.radarcns.kafka.AggregateKey;
public class RadarUtilsTest {
@@ -41,14 +41,14 @@ public void getWindowed() {
String userId = "userId";
String sourceId = "sourceId";
- MeasurementKey measurementKey = new MeasurementKey();
+ ObservationKey measurementKey = new ObservationKey();
measurementKey.setUserId(userId);
measurementKey.setSourceId(sourceId);
Window window = new TimeWindow(1, 4);
- Windowed measurementKeyWindowed = new Windowed<>(measurementKey, window);
+ Windowed measurementKeyWindowed = new Windowed<>(measurementKey, window);
- WindowedKey windowedKey = radarUtilities.getWindowed(measurementKeyWindowed);
+ AggregateKey windowedKey = radarUtilities.getWindowed(measurementKeyWindowed);
assertEquals(windowedKey.getUserId(), userId);
assertEquals(windowedKey.getSourceId(), sourceId);
diff --git a/src/test/java/org/radarcns/util/serde/GenericAvroSerdeTest.java b/src/test/java/org/radarcns/util/serde/GenericAvroSerdeTest.java
deleted file mode 100644
index a6ee3306..00000000
--- a/src/test/java/org/radarcns/util/serde/GenericAvroSerdeTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2017 King's College London and The Hyve
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.radarcns.util.serde;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.HashMap;
-import java.util.Map;
-import org.junit.Before;
-import org.junit.Test;
-
-/**
- * Created by nivethika on 21-12-16.
- */
-public class GenericAvroSerdeTest {
-
- private GenericAvroSerde genericAvroSerde;
-
- @Before
- public void setUp() {
- this.genericAvroSerde = new GenericAvroSerde();
- }
-
- @Test
- public void serializer() {
- assertEquals(this.genericAvroSerde.serializer().getClass(), GenericAvroSerializer.class);
- }
-
- @Test
- public void deserializer() {
- assertEquals(this.genericAvroSerde.deserializer().getClass(), GenericAvroDeserializer.class);
- }
-
- @Test
- public void configure() {
- Map map = new HashMap<>();
- map.put("schema.registry.url", "testvalue");
- this.genericAvroSerde.configure(map, false);
- }
-}
diff --git a/src/test/java/org/radarcns/util/serde/JsonDeserializerTest.java b/src/test/java/org/radarcns/util/serde/JsonDeserializerTest.java
index 019625c6..0f6815ac 100644
--- a/src/test/java/org/radarcns/util/serde/JsonDeserializerTest.java
+++ b/src/test/java/org/radarcns/util/serde/JsonDeserializerTest.java
@@ -18,14 +18,14 @@
import java.nio.charset.Charset;
import junit.framework.TestCase;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
public class JsonDeserializerTest extends TestCase {
public void testSerialize() throws Exception {
byte[] json = "{\"userId\":\"user\",\"sourceId\":\"source\"}"
.getBytes(Charset.forName("UTF-8"));
- JsonDeserializer serializer = new JsonDeserializer<>(MeasurementKey.class);
- MeasurementKey key = serializer.deserialize("mytest", json);
+ JsonDeserializer serializer = new JsonDeserializer<>(ObservationKey.class);
+ ObservationKey key = serializer.deserialize("mytest", json);
assertEquals("user", key.getUserId());
assertEquals("source", key.getSourceId());
}
diff --git a/src/test/java/org/radarcns/util/serde/JsonSerializerTest.java b/src/test/java/org/radarcns/util/serde/JsonSerializerTest.java
index aea2ad7b..aa9132d0 100644
--- a/src/test/java/org/radarcns/util/serde/JsonSerializerTest.java
+++ b/src/test/java/org/radarcns/util/serde/JsonSerializerTest.java
@@ -17,13 +17,13 @@
package org.radarcns.util.serde;
import junit.framework.TestCase;
-import org.radarcns.key.MeasurementKey;
+import org.radarcns.kafka.ObservationKey;
public class JsonSerializerTest extends TestCase {
public void testSerialize() throws Exception {
- JsonSerializer serializer = new JsonSerializer<>();
- MeasurementKey key = new MeasurementKey("user", "source");
+ JsonSerializer