Experiment with unindenting stuff. #5

Open

wants to merge 1 commit into base: 0.2.13
Note: this diff is too large to display in full; only the first 3000 changed files are loaded.
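Editorial note: every hunk below applies the same kind of change, the "unindenting" named in the PR title. Continuation lines of multi-line arguments are pulled back to a shallow, fixed indent instead of being pushed to a deep column, and calls such as `error(...)` and `warn(...)` keep their first string argument on the call line. The sketch below is a hypothetical example written for this note (it is not taken from the diff) and is modeled on the `object Doer` hunk further down; the `error` helper is a stand-in for the project's logging method.

```scala
// Hypothetical illustration of the indentation change exercised by this PR.
object UnindentExample {
  // Stand-in for the logging call used in the real code.
  private def error(msg: String): Unit = System.err.println(msg)

  def before(name: String, detail: String): Unit = {
    // Old style: the call breaks after "error(" and every continuation
    // line is indented to a deep column.
    error(
      s"$name was used as the constructor " +
        s"argument to $detail, but no constructor can handle it. " +
        "Aborting.")
  }

  def after(name: String, detail: String): Unit = {
    // New style: the first string stays on the call line and the
    // continuation lines use a shallow, fixed indent.
    error(s"$name was used as the constructor " +
      s"argument to $detail, but no constructor can handle it. " +
      "Aborting.")
  }

  def main(args: Array[String]): Unit = {
    before("MyParams", "MyAlgorithm")
    after("MyParams", "MyAlgorithm")
  }
}
```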
@@ -234,7 +234,7 @@ class Engine[TD, EI, PD, Q, P, A](
model match {
case modelManifest: PersistentModelManifest => {
logger.info("Custom-persisted model detected for algorithm " +
algo.getClass.getName)
algo.getClass.getName)
SparkWorkflowUtils.getPersistentModel(
modelManifest,
Seq(engineInstanceId, ax, algoName).mkString("-"),
@@ -192,7 +192,7 @@ class MetricEvaluator[EI, Q, P, A, R](
preparator =
new MetricEvaluator.NameParams(engineParams.preparatorParams),
algorithms = engineParams.algorithmParamsList.map(np =>
new MetricEvaluator.NameParams(np)),
new MetricEvaluator.NameParams(np)),
serving = new MetricEvaluator.NameParams(engineParams.servingParams))

implicit lazy val formats = Utils.json4sDefaultFormats
@@ -56,10 +56,9 @@ object Doer extends Logging {
zeroConstr.newInstance()
} catch {
case e: NoSuchMethodException =>
error(
s"${params.getClass.getName} was used as the constructor " +
s"argument to ${e.getMessage}, but no constructor can handle it. " +
"Aborting.")
error(s"${params.getClass.getName} was used as the constructor " +
s"argument to ${e.getMessage}, but no constructor can handle it. " +
"Aborting.")
sys.exit(1)
}
}
@@ -136,7 +136,7 @@ object CreateServer extends Logging {
c.copy(env = Some(x))
} text
("Comma-separated list of environmental variables (in 'FOO=BAR' " +
"format) to pass to the Spark execution environment.")
"format) to pass to the Spark execution environment.")
opt[Int]("port") action { (x, c) =>
c.copy(port = x)
} text ("Port to bind to (default: 8000).")
@@ -467,7 +467,7 @@ class ServerActor[Q, P](val args: ServerConfig,
scalaj.http
.Http(logUrl)
.postData(logPrefix + write(
Map("engineInstance" -> engineInstance, "message" -> message)))
Map("engineInstance" -> engineInstance, "message" -> message)))
.asString
} catch {
case e: Throwable =>
@@ -608,7 +608,7 @@ class ServerActor[Q, P](val args: ServerConfig,
case Success(code) => {
if (code != 201) {
log.error(s"Feedback event failed. Status code: $code." +
s"Data: ${write(data)}.")
s"Data: ${write(data)}.")
}
}
case Failure(t) => {
@@ -636,7 +636,7 @@ class ServerActor[Q, P](val args: ServerConfig,
val servingEndTime = DateTime.now
lastServingSec =
(servingEndTime.getMillis -
servingStartTime.getMillis) / 1000.0
servingStartTime.getMillis) / 1000.0
avgServingSec =
((avgServingSec * requestCount) + lastServingSec) /
(requestCount + 1)
@@ -697,23 +697,23 @@ class ServerActor[Q, P](val args: ServerConfig,
respondWithMediaType(MediaTypes.`application/json`) {
complete {
Map("plugins" -> Map(
"outputblockers" -> pluginContext.outputBlockers.map {
"outputblockers" -> pluginContext.outputBlockers.map {
case (n, p) =>
n -> Map(
"name" -> p.pluginName,
"description" -> p.pluginDescription,
"class" -> p.getClass.getName,
"params" -> pluginContext.pluginParams(p.pluginName))
},
"outputsniffers" -> pluginContext.outputSniffers.map {
"outputsniffers" -> pluginContext.outputSniffers.map {
case (n, p) =>
n -> Map(
"name" -> p.pluginName,
"description" -> p.pluginDescription,
"class" -> p.getClass.getName,
"params" -> pluginContext.pluginParams(p.pluginName))
}
))
))
}
}
}
@@ -146,10 +146,9 @@ object WorkflowUtils extends Logging {
val pClass = clazz.getConstructors.head.getParameterTypes
if (pClass.size == 0) {
if (json != "") {
warn(
s"Non-empty parameters supplied to ${clazz.getName}, but its " +
"constructor does not accept any arguments. Stubbing with empty " +
"parameters.")
warn(s"Non-empty parameters supplied to ${clazz.getName}, but its " +
"constructor does not accept any arguments. Stubbing with empty " +
"parameters.")
}
EmptyParams()
} else {
@@ -191,7 +190,7 @@ object WorkflowUtils extends Logging {
try {
if (!classMap.contains(np.name)) {
error(s"Unable to find $field class with name '${np.name}'" +
" defined in Engine.")
" defined in Engine.")
sys.exit(1)
}
WorkflowUtils.extractParams(engineLanguage,
@@ -338,15 +337,15 @@ object WorkflowUtils extends Logging {
}

flatten(root \ "sparkConf").map(x =>
(x._1.reduce((a, b) => s"$a.$b"), x._2))
(x._1.reduce((a, b) => s"$a.$b"), x._2))
}
}

case class NameParams(name: String, params: Option[JValue])

class NameParamsSerializer
extends CustomSerializer[NameParams](format =>
({
({
case jv: JValue => WorkflowUtils.extractNameParams(jv)
}, {
case x: NameParams =>
@@ -344,9 +344,8 @@ class JsonExtractorSuite extends FunSuite with Matchers {
val json =
JsonExtractor.engineParamsToJson(JsonExtractorOption.Json4sNative, ep)

json should be(
"""{"dataSourceParams":{"ds":{"a":"dsp"}},"preparatorParams":{"":{}},""" +
""""algorithmParamsList":[{"a0":{"a":"ap"}}],"servingParams":{"":{}}}""")
json should be("""{"dataSourceParams":{"ds":{"a":"dsp"}},"preparatorParams":{"":{}},""" +
""""algorithmParamsList":[{"a0":{"a":"ap"}}],"servingParams":{"":{}}}""")
}

test("Serializing Java EngineParams works using option Gson") {
@@ -386,7 +385,7 @@ private case class ScalaQuery(string: String,

private class UpperCaseFormat
extends CustomSerializer[ScalaQuery](format =>
({
({
case JObject(
JField("string", JString(string)) :: JField(
"optional",
@@ -165,19 +165,19 @@ class EventServiceActor(val eventClient: LEvents,
respondWithMediaType(MediaTypes.`application/json`) {
complete {
Map("plugins" -> Map(
"inputblockers" -> pluginContext.inputBlockers.map {
"inputblockers" -> pluginContext.inputBlockers.map {
case (n, p) =>
n -> Map("name" -> p.pluginName,
"description" -> p.pluginDescription,
"class" -> p.getClass.getName)
},
"inputsniffers" -> pluginContext.inputSniffers.map {
"inputsniffers" -> pluginContext.inputSniffers.map {
case (n, p) =>
n -> Map("name" -> p.pluginName,
"description" -> p.pluginDescription,
"class" -> p.getClass.getName)
}
))
))
}
}
}
@@ -330,12 +330,12 @@ class EventServiceActor(val eventClient: LEvents,
respondWithMediaType(MediaTypes.`application/json`) {
complete {
logger.debug(s"GET events of appId=${appId} " +
s"st=${startTimeStr} ut=${untilTimeStr} " +
s"et=${entityType} eid=${entityId} " +
s"li=${limit} rev=${reversed} ")
s"st=${startTimeStr} ut=${untilTimeStr} " +
s"et=${entityType} eid=${entityId} " +
s"li=${limit} rev=${reversed} ")

require(!((reversed == Some(true)) &&
(entityType.isEmpty || entityId.isEmpty)),
(entityType.isEmpty || entityId.isEmpty)),
"the parameter reversed can only be used with" +
" both entityType and entityId specified.")

@@ -425,9 +425,9 @@ class EventServiceActor(val eventClient: LEvents,
}
data
} else {
Future.successful(
Map("status" -> StatusCodes.Forbidden.intValue,
"message" -> s"${event.event} events are not allowed"))
Future.successful(Map(
"status" -> StatusCodes.Forbidden.intValue,
"message" -> s"${event.event} events are not allowed"))
}
}
case Failure(exception) => {
@@ -444,8 +444,8 @@ class EventServiceActor(val eventClient: LEvents,
} else {
(StatusCodes.BadRequest,
Map("message" ->
(s"Batch request must have less than or equal to " +
s"${MaxNumberOfEventsPerBatchRequest} events")))
(s"Batch request must have less than or equal to " +
s"${MaxNumberOfEventsPerBatchRequest} events")))
}
}
}
@@ -42,5 +42,5 @@ object DateTimeJson4sSupport {
/** Custom JSON4S serializer for Joda-Time */
class Serializer
extends CustomSerializer[DateTime](format =>
(deserializeFromJValue, serializeToJValue))
(deserializeFromJValue, serializeToJValue))
}
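Editorial note: several of the serializer hunks above and below re-indent only the opening `({` of the `CustomSerializer[...](format => ({ ... }, { ... }))` pattern. For context, a json4s `CustomSerializer` is constructed from a function that maps `Formats` to a pair of partial functions, (deserializer, serializer). The snippet below is a hypothetical, self-contained example written for this note; `Color` and `ColorSerializer` do not appear in the diff.

```scala
import org.json4s._

// Hypothetical value type to round-trip through a custom serializer.
case class Color(name: String)

// The (deserializer, serializer) pair is the "({ ... }, { ... })" tuple
// whose indentation these hunks change.
class ColorSerializer
    extends CustomSerializer[Color](format =>
      ({
        // Deserializer: JValue => Color
        case JString(name) => Color(name)
      }, {
        // Serializer: Any => JValue
        case Color(name) => JString(name)
      }))

object ColorSerializerExample extends App {
  implicit val formats: Formats = DefaultFormats + new ColorSerializer
  val json = Extraction.decompose(Color("red")) // JString("red")
  val back = json.extract[Color]                // Color("red")
  println(s"$json -> $back")
}
```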
@@ -99,7 +99,7 @@ trait EngineInstances {
@DeveloperApi
class EngineInstanceSerializer
extends CustomSerializer[EngineInstance](format =>
({
({
case JObject(fields) =>
implicit val formats = DefaultFormats
val seed = EngineInstance(id = "",
@@ -140,7 +140,7 @@ class EngineInstanceSerializer
i.copy(env = Extraction.extract[Map[String, String]](env))
case JField("sparkConf", sparkConf) =>
i.copy(sparkConf =
Extraction.extract[Map[String, String]](sparkConf))
Extraction.extract[Map[String, String]](sparkConf))
case JField("dataSourceParams", JString(dataSourceParams)) =>
i.copy(dataSourceParams = dataSourceParams)
case JField("preparatorParams", JString(preparatorParams)) =>
@@ -69,7 +69,7 @@ trait EngineManifests {
@DeveloperApi
class EngineManifestSerializer
extends CustomSerializer[EngineManifest](format =>
({
({
case JObject(fields) =>
val seed = EngineManifest(id = "",
version = "",
@@ -89,7 +89,7 @@ class EngineManifestSerializer
enginemanifest.copy(description = Some(description))
case JField("files", JArray(s)) =>
enginemanifest.copy(files = s.map(t =>
t match {
t match {
case JString(file) => file
case _ => ""
}))
@@ -111,8 +111,8 @@ class EngineManifestSerializer
.map(x => JString(x))
.getOrElse(JNothing)) :: JField("files",
JArray(enginemanifest.files
.map(x => JString(x))
.toList)) :: JField(
.map(x => JString(x))
.toList)) :: JField(
"engineFactory",
JString(enginemanifest.engineFactory)) :: Nil)
}))
@@ -103,15 +103,15 @@ class EvaluationInstanceSerializer
case JField("engineParamsGeneratorClass",
JString(engineParamsGeneratorClass)) =>
i.copy(engineParamsGeneratorClass =
engineParamsGeneratorClass)
engineParamsGeneratorClass)
case JField("batch", JString(batch)) =>
i.copy(batch = batch)
case JField("env", env) =>
i.copy(
env = Extraction.extract[Map[String, String]](env))
case JField("sparkConf", sparkConf) =>
i.copy(sparkConf =
Extraction.extract[Map[String, String]](sparkConf))
Extraction.extract[Map[String, String]](sparkConf))
case JField("evaluatorResults",
JString(evaluatorResults)) =>
i.copy(evaluatorResults = evaluatorResults)
@@ -112,29 +112,26 @@ object EventJson4sSupport {
@DeveloperApi
def writeJson: PartialFunction[Any, JValue] = {
case d: Event => {
JObject(JField("eventId",
d.eventId
.map(eid => JString(eid))
.getOrElse(JNothing)) :: JField(
"event",
JString(d.event)) :: JField("entityType", JString(d.entityType)) :: JField(
"entityId",
JString(d.entityId)) :: JField("targetEntityType",
d.targetEntityType
.map(JString(_))
.getOrElse(JNothing)) :: JField(
"targetEntityId",
d.targetEntityId.map(JString(_)).getOrElse(JNothing)) :: JField(
"properties",
d.properties.toJObject) :: JField(
"eventTime",
JString(DataUtils.dateTimeToString(d.eventTime))) :: // disable tags from API for now
// JField("tags", JArray(d.tags.toList.map(JString(_)))) ::
// disable tags from API for now
JField("prId", d.prId.map(JString(_)).getOrElse(JNothing)) :: // don't show creationTime for now
JField(
"creationTime",
JString(DataUtils.dateTimeToString(d.creationTime))) :: Nil)
JObject(JField(
"eventId",
d.eventId.map(eid => JString(eid)).getOrElse(JNothing)) :: JField(
"event",
JString(d.event)) :: JField("entityType", JString(d.entityType)) :: JField(
"entityId",
JString(d.entityId)) :: JField(
"targetEntityType",
d.targetEntityType.map(JString(_)).getOrElse(JNothing)) :: JField(
"targetEntityId",
d.targetEntityId.map(JString(_)).getOrElse(JNothing)) :: JField(
"properties",
d.properties.toJObject) :: JField(
"eventTime",
JString(DataUtils.dateTimeToString(d.eventTime))) :: // disable tags from API for now
// JField("tags", JArray(d.tags.toList.map(JString(_)))) ::
// disable tags from API for now
JField("prId", d.prId.map(JString(_)).getOrElse(JNothing)) :: // don't show creationTime for now
JField("creationTime",
JString(DataUtils.dateTimeToString(d.creationTime))) :: Nil)
}
}

@@ -209,7 +206,7 @@ object EventJson4sSupport {
@DeveloperApi
class DBSerializer
extends CustomSerializer[Event](format =>
(deserializeFromJValue, serializeToJValue))
(deserializeFromJValue, serializeToJValue))

/** :: DeveloperApi ::
* Custom JSON4S serializer for [[Event]] intended to be used by the Event
Expand Down Expand Up @@ -240,5 +237,5 @@ object BatchEventsJson4sSupport {
@DeveloperApi
class APISerializer
extends CustomSerializer[Seq[Try[Event]]](format =>
(readJson, Map.empty))
(readJson, Map.empty))
}
@@ -40,7 +40,7 @@ object LEventAggregator {
events.toList
.groupBy(_.entityId)
.mapValues(_.sortBy(_.eventTime.getMillis)
.foldLeft[Prop](Prop())(propAggregator))
.foldLeft[Prop](Prop())(propAggregator))
.filter { case (k, v) => v.dm.isDefined }
.mapValues { v =>
require(v.firstUpdated.isDefined,
@@ -54,7 +54,7 @@ trait Models {
@DeveloperApi
class ModelSerializer
extends CustomSerializer[Model](format =>
({
({
case JObject(fields) =>
implicit val formats = DefaultFormats
val seed = Model(id = "", models = Array[Byte]())
@@ -114,7 +114,7 @@ private[prediction] case class EventOp(
setProp.flatMap { set =>
val unsetKeys: Set[String] = unsetProp
.map(unset =>
unset.fields.filter { case (k, v) => (v >= set.fields(k).t) }.keySet)
unset.fields.filter { case (k, v) => (v >= set.fields(k).t) }.keySet)
.getOrElse(Set())

val combinedFields = deleteEntity.map { delete =>