Merge pull request #42 from RADAR-CNS/release_0.1.1
Prepare Release 0.1.1
yatharthranjan committed Dec 6, 2017
2 parents 7fff01f + b0103ef commit 6a80d59
Showing 70 changed files with 1,170 additions and 1,336 deletions.
10 changes: 0 additions & 10 deletions .idea/compiler.xml

This file was deleted.

11 changes: 0 additions & 11 deletions .idea/modules.xml

This file was deleted.

21 changes: 14 additions & 7 deletions .travis.yml
@@ -2,18 +2,22 @@ language: java
jdk:
- oraclejdk8
sudo: required

services:
- docker

env:
DOCKER_COMPOSE_VERSION: 1.9.0
DOCKER_COMPOSE_VERSION: 1.16.1
TERM: dumb
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -fr $HOME/.gradle/caches/*/plugin-resolution/

cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
- $HOME/.gradle/caches/jars-1
- $HOME/.gradle/caches/jars-2
- $HOME/.gradle/caches/jars-3
- $HOME/.gradle/caches/modules-2/files-2.1/
- $HOME/.gradle/native
- $HOME/.gradle/wrapper

before_install:
- mkdir -p "$HOME/bin";
@@ -31,7 +35,10 @@ deploy:
provider: releases
api_key: ${GH_TOKEN}
file_glob: true
file: "build/libs/*.jar"
file:
- build/libs/*.jar
- build/distributions/*.zip
- build/distributions/*.tar.gz
skip_cleanup: true
on:
tags: true
2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,4 +1,4 @@
FROM confluentinc/cp-base:3.2.1-5
FROM confluentinc/cp-base:3.3.0

ENV TERM=dumb

6 changes: 3 additions & 3 deletions README.md
@@ -135,8 +135,8 @@ To get email notifications for Empatica E4 battery status, an email server witho
data:
- topic: topic1
file: topic1.csv
key_schema: org.radarcns.key.MeasurementKey
value_schema: org.radarcns.empatica.EmpaticaE4Acceleration
key_schema: org.radarcns.kafka.ObservationKey
value_schema: org.radarcns.passive.empatica.EmpaticaE4Acceleration
```

Each value has a topic to send the data to, a file containing the data, a schema class for the key and a schema class for the value. Also create a CSV file for each of these entries:
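
For instance, `topic1.csv` could look like the following. This is a minimal illustrative sketch, not part of the commit: the column set is assumed from the `ObservationKey` fields (`projectId`, `userId`, `sourceId`) and the `EmpaticaE4Acceleration` fields (`time`, `timeReceived`, `x`, `y`, `z`), and the values are made up:

```
projectId,userId,sourceId,time,timeReceived,x,y,z
test-project,user1,empatica-1,1512568800.0,1512568800.1,0.01,-0.98,0.12
test-project,user1,empatica-1,1512568800.1,1512568800.2,0.02,-0.97,0.11
```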
@@ -177,7 +177,7 @@ RADAR-Stream is a layer on top of Kafka streams. Topics are processed by streams
KafkaStreams currently communicates using master-slave model. The [MasterAggregator][1] defines the stream-master, while [AggregatorWorker][2] represents the stream-slave. The master-stream creates, starts and stops a list of stream-slaves registered with the corresponding master.
While the classical Kafka Consumer requires two implementations to support standalone and group executions, the AggregatorWorker provides both behaviors with one implementation.

To extend the RADAR-Stream API, follow these steps (see the `org.radarcns.empatica` package as an example):
To extend the RADAR-Stream API, follow these steps (see the `org.radarcns.passive.empatica` package as an example):

- Create a stream group by overriding [GeneralStreamGroup][8]. Use its `createSensorStream` and `createStream` methods to create the stream definitions.
- For each topic, create a [AggregatorWorker][2].
50 changes: 33 additions & 17 deletions build.gradle
@@ -17,7 +17,7 @@ plugins {
//---------------------------------------------------------------------------//

group = 'org.radarcns'
version = '0.1'
version = '0.1.1'
ext.description = 'Kafka backend for processing device data.'

mainClassName = 'org.radarcns.RadarBackend'
@@ -30,21 +30,22 @@ sourceCompatibility = '1.8'

ext.boundaryVersion = '1.0.6'
ext.codacyVersion = '1.0.10'
ext.confluentVersion = '3.2.1'
ext.confluentVersion = '3.3.0'
ext.hamcrestVersion = '1.3'
ext.kafkaVersion = '0.10.2.1'
ext.kafkaVersion = '0.11.0.1'
ext.jacksonVersion='2.8.5'
ext.javaMailVersion = '1.5.6'
ext.junitVersion = '4.12'
ext.mathVersion = '3.0'
ext.findbugVersion = '3.0.1'
ext.commonsCliVersion = '1.2'
ext.mockitoVersion = '2.2.29'
ext.radarCommonsVersion = '0.5'
ext.radarSchemasVersion = '0.2-alpha.1'
ext.radarCommonsVersion = '0.6.3'
ext.radarSchemasVersion = '0.2.2'
ext.subethamailVersion = '3.1.7'
ext.jsoupVersion = '1.10.2'
ext.slf4jVersion = '1.7.21'
ext.slf4jVersion = '1.7.25'
ext.log4jVersion = '1.2.17'
ext.avroVersion = '1.8.2'

ext.githubUrl = 'https://github.com/' + githubRepoName + '.git'
@@ -81,9 +82,6 @@ configurations.all {

// In this section you declare where to find the dependencies of your project
repositories {
// For working with dev-branch commons
maven { url "${rootProject.projectDir}/libs" }

// Use 'jcenter' for resolving your dependencies.
// You can declare any Maven/Ivy/file repository here.
jcenter()
@@ -100,15 +98,16 @@ repositories {
dependencies {
compile group: 'org.radarcns', name: 'radar-commons', version: radarCommonsVersion
compile group: 'org.apache.avro', name: 'avro', version: avroVersion
compile (group: 'org.radarcns', name: 'radar-commons-testing', version: radarCommonsVersion) {
exclude group: 'org.slf4j', module: 'slf4j-simple'
}
compile group: 'org.radarcns', name: 'radar-commons-testing', version: radarCommonsVersion
compile group: 'org.radarcns', name: 'radar-schemas-commons', version: radarSchemasVersion

// Kafka streaming API
compile group: 'org.apache.kafka', name: 'kafka-streams', version: kafkaVersion
compile (group: 'io.confluent', name: 'kafka-avro-serializer', version: confluentVersion) {
compile (group: 'io.confluent', name: 'kafka-streams-avro-serde', version: confluentVersion) {
exclude group: 'org.slf4j', module: 'slf4j-log4j12'
exclude group: 'log4j', module: 'log4j'
exclude group: 'jline', module: 'jline'
exclude group: 'io.netty', module: 'netty'
}

// Nonnull annotation
@@ -135,8 +134,12 @@ dependencies {
// Mock mail server
testCompile group: 'org.subethamail', name: 'subethasmtp', version: subethamailVersion

testImplementation group: 'org.slf4j', name: 'slf4j-simple', version: slf4jVersion
integrationTestImplementation group: 'org.slf4j', name: 'slf4j-simple', version: slf4jVersion
runtimeOnly group: 'log4j', name: 'log4j', version: log4jVersion
runtimeOnly group: 'org.slf4j', name: 'slf4j-log4j12', version: slf4jVersion
testImplementation group: 'log4j', name: 'log4j', version: log4jVersion
testImplementation group: 'org.slf4j', name: 'slf4j-log4j12', version: slf4jVersion
integrationTestImplementation group: 'log4j', name: 'log4j', version: log4jVersion
integrationTestImplementation group: 'org.slf4j', name: 'slf4j-log4j12', version: slf4jVersion

// For Topic name validation based on Kafka classes
testImplementation (group: 'org.apache.kafka', name: 'kafka_2.11', version: kafkaVersion) {
@@ -300,6 +303,10 @@ ext.sharedManifest = manifest {
// Packaging //
//---------------------------------------------------------------------------//

processResources {
expand(version: version)
}

jar {
manifest {
from sharedManifest
@@ -309,7 +316,11 @@ jar {

shadowJar {
mergeServiceFiles()
exclude 'log4j.properties'
}

tasks.withType(Tar){
compression = Compression.GZIP
extension = 'tar.gz'
}

// custom tasks for creating source/javadoc jars
@@ -345,6 +356,11 @@ publishing {
root.appendNode('description', description)
root.appendNode('name', rootProject.name)
root.appendNode('url', githubUrl)
root.dependencies.'*'.findAll() {
it.artifactId.text() in ['log4j', 'slf4j-log4j12']
}.each() {
it.parent().remove(it)
}
root.children().last() + pomConfig
}
}
@@ -380,7 +396,7 @@ bintray {

task wrapper(type: Wrapper) {
gradleVersion = '4.1'
distributionUrl distributionUrl.replace("bin", "all")
distributionType 'all'
}

run {
18 changes: 14 additions & 4 deletions docker-compose.yml
@@ -6,7 +6,7 @@ services:
# Zookeeper Cluster #
#---------------------------------------------------------------------------#
zookeeper-1:
image: confluentinc/cp-zookeeper:3.2.1
image: confluentinc/cp-zookeeper:3.3.0
environment:
ZOOKEEPER_SERVER_ID: 1
ZOOKEEPER_CLIENT_PORT: 2181
@@ -19,7 +19,7 @@ services:
# Kafka Cluster #
#---------------------------------------------------------------------------#
kafka-1:
image: confluentinc/cp-kafka:3.2.1
image: confluentinc/cp-kafka:3.3.0
depends_on:
- zookeeper-1
environment:
@@ -28,12 +28,19 @@ services:
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-1:9092
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
KAFKA_GROUP_MIN_SESSION_TIMEOUT_MS: 5000
KAFKA_INTER_BROKER_PROTOCOL_VERSION: 0.11.0
KAFKA_LOG_MESSAGE_FORMAT_VERSION: 0.11.0
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1


#---------------------------------------------------------------------------#
# Schema Registry #
#---------------------------------------------------------------------------#
schema-registry-1:
image: confluentinc/cp-schema-registry:3.2.1
image: confluentinc/cp-schema-registry:3.3.0
depends_on:
- zookeeper-1
- kafka-1
@@ -50,7 +57,7 @@ services:
# REST proxy #
#---------------------------------------------------------------------------#
rest-proxy-1:
image: confluentinc/cp-kafka-rest:3.2.1
image: confluentinc/cp-kafka-rest:3.3.0
depends_on:
- kafka-1
- schema-registry-1
@@ -74,3 +81,6 @@ services:
- schema-registry-1
command:
- integrationTest
volumes:
- ./build/jacoco:/code/build/jacoco
- ./build/reports:/code/build/reports
4 changes: 2 additions & 2 deletions radar.yml
@@ -19,8 +19,8 @@ broker:

#Kafka internal parameters
stream_properties:
auto_commit_interval_ms: 1000
session_timeout_ms: 10000
auto.commit.interval.ms: 1000
session.timeout.ms: 10000

#============================ Kafka Streams ============================#
#The number of threads that a stream must be run according is priority


