From 54f5d672cb3f2b30869815fe036c5c73fba66e86 Mon Sep 17 00:00:00 2001
From: Ólafur Páll Geirsson
Date: Sat, 17 Dec 2016 17:56:15 +0100
Subject: [PATCH] Run penalizeSingleSelectMultiArgList=true
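
Reformat the checked-in test corpus with scalafmt's
penalizeSingleSelectMultiArgList flag enabled. Assuming the behavior this
flag is documented to have, it adds a penalty to a line break placed before
a lone select (for example .actorOf) that is followed by a multi-argument
list, so the formatter prefers to break inside the argument list instead.
A minimal sketch of the intended effect follows; the Scala snippet is
hypothetical, not taken from this diff, and the names (system, Props,
Worker, queue, registry) are illustrative only:

    // penalizeSingleSelectMultiArgList = false: break before the select
    system
      .actorOf(Props(classOf[Worker], queue, registry), "worker")

    // penalizeSingleSelectMultiArgList = true: keep the select attached
    // and break inside the multi-argument list instead
    system.actorOf(
      Props(classOf[Worker], queue, registry), "worker")

The diffstat below shows the resulting reflow across the corpus (akka,
breeze, finagle, kafka, scala, spark, and others).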
---
 .../io/prediction/core/BaseAlgorithm.scala | 5 +-
 .../io/prediction/workflow/CreateServer.scala | 5 +-
 .../prediction/workflow/JsonExtractor.scala | 10 +-
 .../controller/MetricEvaluatorTest.scala | 6 +-
 .../workflow/JsonExtractorSuite.scala | 35 +-
 .../io/prediction/data/storage/Storage.scala | 5 +-
 .../src/main/scala/DataSource.scala | 6 +-
 .../src/main/scala/SimRankAlgorithm.scala | 6 +-
 .../scala/io/prediction/tools/RunServer.scala | 6 +-
 .../io/prediction/tools/RunWorkflow.scala | 6 +-
 .../io/prediction/tools/console/Console.scala | 5 +-
 .../akka/actor/ActorCreationPerfSpec.scala | 8 +-
 .../test/scala/akka/actor/ActorDSLSpec.scala | 3 +-
 .../scala/akka/actor/ActorLookupSpec.scala | 12 +-
 .../scala/akka/actor/ActorSelectionSpec.scala | 10 +-
 .../scala/akka/actor/ActorSystemSpec.scala | 8 +-
 .../scala/akka/actor/ConsistencySpec.scala | 3 +-
 .../akka/actor/DeadLetterSupressionSpec.scala | 8 +-
 .../test/scala/akka/actor/FSMActorSpec.scala | 6 +-
 .../test/scala/akka/actor/SchedulerSpec.scala | 12 +-
 .../scala/akka/actor/SupervisorSpec.scala | 3 +-
 .../test/scala/akka/dispatch/FutureSpec.scala | 3 +-
 .../scala/akka/io/TcpConnectionSpec.scala | 8 +-
 .../akka/io/TcpIntegrationSpecSupport.scala | 9 +-
 .../test/scala/akka/io/TcpListenerSpec.scala | 3 +-
 .../akka/pattern/CircuitBreakerSpec.scala | 6 +-
 .../routing/MetricsBasedResizerSpec.scala | 6 +-
 .../scala/akka/actor/ActorRefProvider.scala | 4 +-
 .../main/scala/akka/actor/ActorSystem.scala | 11 +-
 .../main/scala/akka/actor/TypedActor.scala | 5 +-
 .../akka/actor/dungeon/ReceiveTimeout.scala | 5 +-
 .../akka/dispatch/AbstractDispatcher.scala | 6 +-
 .../main/scala/akka/dispatch/Mailbox.scala | 5 +-
 .../main/scala/akka/io/SimpleDnsManager.scala | 10 +-
 .../src/main/scala/akka/io/TcpListener.scala | 4 +-
 .../src/main/scala/akka/io/UdpListener.scala | 4 +-
 .../main/scala/akka/pattern/AskSupport.scala | 6 +-
 .../pattern/BackoffOnRestartSupervisor.scala | 6 +-
 .../akka/routing/ConsistentHashing.scala | 4 +-
 .../routing/OptimalSizeExploringResizer.scala | 5 +-
 .../src/main/scala/akka/util/Index.scala | 6 +-
 .../PersistenceActorDeferBenchmark.scala | 5 +-
 .../PersistentActorBenchmark.scala | 10 +-
 .../internal/component/ActorComponent.scala | 5 +-
 .../metrics/ClusterMetricsRouting.scala | 5 +-
 .../cluster/metrics/MetricsCollector.scala | 5 +-
 .../metrics/ClusterMetricsRoutingSpec.scala | 5 +-
 .../cluster/sharding/ShardCoordinator.scala | 6 +-
 .../sharding/ClusterShardingSpec.scala | 6 +-
 .../akka/cluster/client/ClusterClient.scala | 11 +-
 .../pubsub/DistributedPubSubMediator.scala | 18 +-
 .../singleton/ClusterSingletonManager.scala | 7 +-
 .../DistributedPubSubMediatorRouterSpec.scala | 6 +-
 .../main/scala/akka/cluster/AutoDown.scala | 5 +-
 .../src/main/scala/akka/cluster/Cluster.scala | 5 +-
 .../scala/akka/cluster/ClusterDaemon.scala | 9 +-
 .../cluster/ClusterMetricsCollector.scala | 5 +-
 .../src/main/scala/akka/cluster/Gossip.scala | 3 +-
 .../routing/AdaptiveLoadBalancing.scala | 5 +-
 .../scala/akka/cluster/StressSpec.scala | 7 +-
 .../AdaptiveLoadBalancingRouterSpec.scala | 5 +-
 .../test/scala/akka/cluster/GossipSpec.scala | 9 +-
 .../circuitbreaker/askExtensions.scala | 5 +-
 .../scala/akka/cluster/ddata/Replicator.scala | 37 +-
 .../code/docs/actor/SchedulerDocSpec.scala | 6 +-
 .../docs/dispatcher/DispatcherDocSpec.scala | 5 +-
 .../server/WebSocketExampleSpec.scala | 5 +-
 .../query/MyEventsByTagPublisher.scala | 6 +-
 .../code/docs/routing/RouterDocSpec.scala | 5 +-
 .../scala/code/docs/stream/FlowDocSpec.scala | 5 +-
 .../code/docs/stream/FlowGraphDocSpec.scala | 3 +-
 .../stream/cookbook/RecipeParseLines.scala | 3 +-
 .../engine/client/PoolInterfaceActor.scala | 5 +-
 .../engine/parsing/HttpHeaderParser.scala | 5 +-
 .../engine/server/HttpServerBluePrint.scala | 8 +-
 .../http/impl/engine/ws/FrameOutHandler.scala | 10 +-
 .../akka/http/impl/engine/ws/Masking.scala | 4 +-
 .../ClientConnectionSettingsImpl.scala | 5 +-
 .../main/scala/akka/http/scaladsl/Http.scala | 3 +-
 .../akka/http/scaladsl/model/HttpHeader.scala | 5 +-
 .../http/scaladsl/model/HttpMessage.scala | 6 +-
 .../akka/http/scaladsl/model/Multipart.scala | 4 +-
 .../engine/client/ConnectionPoolSpec.scala | 10 +-
 .../engine/parsing/HttpHeaderParserSpec.scala | 5 +-
 .../engine/parsing/ResponseParserSpec.scala | 7 +-
 .../impl/engine/ws/WSServerAutobahnTest.scala | 33 +-
 .../impl/model/parser/HttpHeaderSpec.scala | 10 +-
 .../http/javadsl/HttpExtensionApiSpec.scala | 5 +-
 .../akka/http/scaladsl/ClientServerSpec.scala | 6 +-
 .../akka/http/scaladsl/model/UriSpec.scala | 16 +-
 .../akka/http/scaladsl/coding/Encoder.scala | 4 +-
 .../akka/http/scaladsl/server/Directive.scala | 4 +-
 .../scaladsl/server/ExceptionHandler.scala | 5 +-
 .../directives/ExecutionDirectives.scala | 4 +-
 .../akka/remote/testconductor/Player.scala | 5 +-
 .../akka/remote/testkit/MultiNodeSpec.scala | 4 +-
 .../EventsByPersistenceIdPublisher.scala | 8 +-
 .../leveldb/EventsByTagPublisher.scala | 8 +-
 .../scala/akka/persistence/Persistence.scala | 3 +-
 .../journal/PersistencePluginProxy.scala | 4 +-
 .../journal/leveldb/LeveldbStore.scala | 4 +-
 .../persistence/AtLeastOnceDeliverySpec.scala | 5 +-
 .../PersistentActorBoundedStashingSpec.scala | 5 +-
 .../PersistentActorFailureSpec.scala | 3 +-
 .../SnapshotFailureRobustnessSpec.scala | 3 +-
 .../journal/ReplayFilterSpec.scala | 24 +-
 .../PiercingShouldKeepQuarantineSpec.scala | 4 +-
 .../remote/RemoteQuarantinePiercingSpec.scala | 4 +-
 .../RemoteRestartedQuarantinedSpec.scala | 4 +-
 .../remote/testconductor/BarrierSpec.scala | 3 +-
 .../src/main/scala/akka/remote/Endpoint.scala | 11 +-
 .../scala/akka/remote/RemoteWatcher.scala | 6 +-
 .../src/main/scala/akka/remote/Remoting.scala | 11 +-
 .../akka/remote/RemotingLifecycleEvent.scala | 5 +-
 .../transport/netty/NettyTransport.scala | 4 +-
 .../scala/akka/remote/ActorsLeakSpec.scala | 5 +-
 .../transport/AkkaProtocolStressTest.scala | 6 +-
 .../transport/netty/NettyTransportSpec.scala | 19 +-
 .../simple/SimpleClusterListener.scala | 5 +-
 .../simple/SimpleClusterListener2.scala | 5 +-
 .../cluster/stats/StatsSampleOneMaster.scala | 11 +-
 .../sample/cluster/stats/StatsService.scala | 4 +-
 .../distributeddata/ReplicatedMetrics.scala | 14 +-
 .../sample/persistence/SnapshotExample.scala | 4 +-
 .../sample/persistence/ViewExample.scala | 6 +-
 .../scala/akka/stream/testkit/Coroner.scala | 8 +-
 .../akka/stream/io/InputStreamSinkSpec.scala | 4 +-
 .../stream/io/OutputStreamSourceSpec.scala | 4 +-
 .../test/scala/akka/stream/io/TcpHelper.scala | 5 +-
 .../test/scala/akka/stream/io/TlsSpec.scala | 8 +-
 .../akka/stream/scaladsl/AttributesSpec.scala | 5 +-
 .../akka/stream/scaladsl/FlowFilterSpec.scala | 5 +-
 .../akka/stream/scaladsl/FlowFoldSpec.scala | 6 +-
 .../akka/stream/scaladsl/FlowReduceSpec.scala | 6 +-
 .../scala/akka/stream/scaladsl/FlowSpec.scala | 9 +-
 .../stream/scaladsl/GraphUnzipWithSpec.scala | 5 +-
 .../akka/stream/scaladsl/HeadSinkSpec.scala | 3 +-
 .../akka/stream/scaladsl/LastSinkSpec.scala | 6 +-
 .../main/scala/akka/stream/impl/Sinks.scala | 5 +-
 .../scala/akka/stream/impl/StreamLayout.scala | 5 +-
 .../stream/impl/fusing/GraphInterpreter.scala | 5 +-
 .../scala/akka/stream/impl/io/IOSinks.scala | 12 +-
 .../scala/akka/stream/impl/io/TcpStages.scala | 7 +-
 .../scala/akka/stream/javadsl/Graph.scala | 4 +-
 .../scala/akka/stream/stage/GraphStage.scala | 6 +-
 .../scala/akka/testkit/AkkaSpecSpec.scala | 3 +-
 .../src/test/scala/akka/testkit/Coroner.scala | 8 +-
 repos/akka/project/ActivatorDist.scala | 5 +-
 .../breeze/linalg/DenseAxpyBenchmark.scala | 8 +-
 .../linalg/DenseDotProductBenchmark.scala | 14 +-
 .../scala/breeze/io/RandomAccessFile.scala | 32 +-
 .../scala/breeze/linalg/DenseMatrix.scala | 17 +-
 .../scala/breeze/linalg/DenseVector.scala | 12 +-
 .../scala/breeze/linalg/SparseVector.scala | 7 +-
 .../breeze/linalg/constructors/random.scala | 5 +-
 .../linalg/operators/DenseVectorOps.scala | 12 +-
 .../linalg/operators/SparseVectorOps.scala | 26 +-
 .../linalg/operators/VectorBuilderOps.scala | 4 +-
 .../breeze/numerics/financial/package.scala | 7 +-
 .../optimize/ProjectedQuasiNewton.scala | 9 +-
 .../optimize/TruncatedNewtonMinimizer.scala | 6 +-
 .../proximal/NonlinearMinimizer.scala | 6 +-
 .../scala/breeze/signal/fourierShift.scala | 4 +-
 .../scala/breeze/signal/iFourierShift.scala | 4 +-
 .../stats/regression/LeastSquares.scala | 15 +-
 .../main/scala/breeze/util/ArrayUtil.scala | 20 +-
 .../src/main/scala/cats/js/std/future.scala | 4 +-
 .../src/main/scala/cats/jvm/std/future.scala | 4 +-
 .../main/scala/org/ensime/core/Analyzer.scala | 5 +-
 .../scala/org/ensime/core/Completion.scala | 7 +-
 .../scala/org/ensime/core/JavaAnalyzer.scala | 6 +-
 .../main/scala/org/ensime/core/Project.scala | 5 +-
 .../org/ensime/indexer/SearchService.scala | 4 +-
 .../main/scala/pythonparse/Statements.scala | 11 +-
 .../test/resources/scalaparse/GenJSCode.scala | 8 +-
 .../finagle/netty3/ChannelSnooper.scala | 5 +-
 .../finagle/netty3/Netty3Transporter.scala | 16 +-
 .../service/FailureAccrualFactory.scala | 4 +-
 .../finagle/util/HashedWheelTimer.scala | 5 +-
 .../com/twitter/finagle/util/TimerStats.scala | 10 +-
 .../httpproxy/HttpConnectHandlerTest.scala | 6 +-
 .../example/memcache/KetamaClientStress.scala | 9 +-
 .../com/twitter/finagle/http/Codec.scala | 4 +-
 .../com/twitter/finagle/http/Message.scala | 5 +-
 .../twitter/finagle/http/filter/Cors.scala | 4 +-
 .../finagle/http/filter/LoggingFilter.scala | 4 +-
 .../twitter/finagle/http/CookieMapTest.scala | 4 +-
 .../twitter/finagle/http/RequestTest.scala | 6 +-
 .../finagle/http/codec/HttpDtabTest.scala | 8 +-
 .../finagle/http/exp/MultipartTest.scala | 6 +-
 .../scala/com/twitter/finagle/Memcached.scala | 4 +-
 .../twitter/finagle/memcached/Client.scala | 27 +-
 .../integration/ClusterClientTest.scala | 5 +-
 .../integration/MigrationClientTest.scala | 25 +-
 .../com/twitter/finagle/mux/ServerTest.scala | 7 +-
 .../finagle/mysql/CanBeParameter.scala | 5 +-
 .../mysql/integration/IntegrationClient.scala | 4 +-
 .../finagle/netty4/Netty4Listener.scala | 8 +-
 .../netty4/channel/ChannelSnooper.scala | 5 +-
 .../finagle/redis/protocol/Reply.scala | 4 +-
 .../finagle/serverset2/ZkSession.scala | 5 +-
 .../client/apache/ApacheWatcher.scala | 5 +-
 .../finagle/serverset2/Zk2ResolverTest.scala | 5 +-
 .../client/apache/ApacheWatcherTest.scala | 6 +-
 .../client/apache/ApacheZooKeeperTest.scala | 162 +++--
 .../ZookeeperServerSetClusterTest.scala | 5 +-
 .../com/twitter/finagle/spdy/Codec.scala | 8 +-
 .../stats/MetricsBucketedHistogram.scala | 4 +-
 .../finagle/stats/MetricsStatsReceiver.scala | 7 +-
 .../finagle/stats/JsonExporterTest.scala | 5 +-
 .../main/scala/com/twitter/finagle/rich.scala | 7 +-
 .../finagle/thrift/TTwitterClientFilter.scala | 5 +-
 .../scala/com/twitter/finagle/ThriftMux.scala | 10 +-
 .../twitter/finagle/thriftmux/Netty3.scala | 6 +-
 .../scala/net/liftweb/json/ParserBugs.scala | 4 +-
 .../net/liftweb/markdown/LineTokenizer.scala | 3 +-
 .../db/src/main/scala/net/liftweb/db/DB.scala | 7 +-
 .../liftweb/db/LoggingStatementWrappers.scala | 19 +-
 .../net/liftweb/mapper/HasManyThrough.scala | 6 +-
 .../scala/net/liftweb/mapper/OneToMany.scala | 5 +-
 .../net/liftweb/mapper/view/TableEditor.scala | 6 +-
 .../net/liftweb/mapper/DBProviders.scala | 11 +-
 .../scala/net/liftweb/proto/ProtoUser.scala | 6 +-
 .../scala/net/liftweb/http/LiftScreen.scala | 10 +-
 .../scala/net/liftweb/http/LiftServlet.scala | 10 +-
 .../scala/net/liftweb/http/LiftSession.scala | 10 +-
 .../scala/net/liftweb/http/MVCHelper.scala | 6 +-
 .../src/main/scala/net/liftweb/http/S.scala | 6 +-
 .../javascript/JavaScriptContext.scala | 3 +-
 .../net/liftweb/http/SecurityRulesSpec.scala | 3 +-
 .../liftweb/webapptest/JettyTestServer.scala | 4 +-
 .../net/liftweb/webapptest/OneShot.scala | 4 +-
 .../net/liftweb/webapptest/ToHeadUsages.scala | 4 +-
 .../src/main/scala/ScalatraBootstrap.scala | 4 +-
 .../core/controller/DashboardController.scala | 15 +-
 .../core/service/ProtectedBranchService.scala | 6 +-
 .../core/service/WebHookService.scala | 5 +-
 .../gitbucket/core/service/WikiService.scala | 8 +-
 .../servlet/BasicAuthenticationFilter.scala | 4 +-
 .../core/servlet/InitializeListener.scala | 5 +-
 .../scala/gitbucket/core/ssh/GitCommand.scala | 12 +-
 .../core/service/LabelsServiceSpec.scala | 7 +-
 .../core/service/ServiceSpecBase.scala | 8 +-
 .../com/gravity/goose/images/ImageSaver.scala | 4 +-
 .../goose/images/StandardImageExtractor.scala | 15 +-
 .../gravity/goose/network/HtmlFetcher.scala | 17 +-
 .../goose/network/HttpExceptions.scala | 5 +-
 .../com/gravity/goose/text/StopWords.scala | 5 +-
 .../com/gravity/goose/ExtractionsTest.scala | 12 +-
 .../com/gravity/goose/GoldSitesTestIT.scala | 22 +-
 .../scala/remote/RemoteResourceOwner.scala | 4 +-
 .../scala/IdeaIncrementalBuilder.scala | 9 +-
 .../jps/incremental/scala/SbtBuilder.scala | 4 +-
 .../scala/local/CompilerFactoryImpl.scala | 5 +-
 .../incremental/scala/local/IdeClient.scala | 5 +-
 .../scala/local/IdeClientIdea.scala | 4 +-
 .../incremental/scala/local/LocalServer.scala | 5 +-
 .../jps/incremental/scala/remote/Main.scala | 6 +-
 .../scala/remote/RemoteServer.scala | 6 +-
 ...conLanguageCodeStyleSettingsProvider.scala | 4 +-
 .../hocon/formatting/HoconFormatter.scala | 4 +-
 .../HoconErrorHighlightingAnnotator.scala | 6 +-
 .../GoToImplicitConversionAction.scala | 7 +-
 .../NewScalaTypeDefinitionAction.scala | 5 +-
 .../scala/actions/ScalaActionUtil.scala | 6 +-
 .../scala/actions/ScalaFileTemplateUtil.scala | 4 +-
 .../ShowImplicitParametersAction.scala | 7 +-
 .../scala/actions/ShowTypeInfoAction.scala | 5 +-
 .../scala/annotator/AnnotatorUtils.scala | 5 +-
 .../annotator/ApplicationAnnotator.scala | 5 +-
 .../scala/annotator/AssignmentAnnotator.scala | 13 +-
 .../scala/annotator/FunctionAnnotator.scala | 14 +-
 .../scala/annotator/OverridingAnnotator.scala | 10 +-
 .../scala/annotator/PatternAnnotator.scala | 3 +-
 .../scala/annotator/ScalaAnnotator.scala | 12 +-
 .../CreateApplyOrUnapplyQuickFix.scala | 4 +-
 .../CreateEntityQuickFix.scala | 10 +-
 .../CreateTypeDefinitionQuickFix.scala | 12 +-
 .../gutter/ScalaLineMarkerProvider.scala | 5 +-
 .../annotator/gutter/ScalaMarkerType.scala | 4 +-
 .../quickfix/AddLToLongLiteralFix.scala | 5 +-
 .../annotator/quickfix/ChangeTypeFix.scala | 10 +-
 .../quickfix/ConvertOctalToHexFix.scala | 5 +-
 .../quickfix/ImplementMethodsQuickFix.scala | 6 +-
 .../quickfix/WrapInOptionQuickFix.scala | 5 +-
 .../AddModifierWithValOrVarQuickFix.scala | 12 +-
 .../template/CaseClassWithoutParamList.scala | 6 +-
 .../template/IllegalInheritance.scala | 5 +-
 .../template/ObjectCreationImpossible.scala | 5 +-
 .../plugins/scala/caches/CachesUtil.scala | 11 +-
 .../ScalaGenerateCompanionObjectHandler.scala | 5 +-
 .../ScalaGenerateEqualsHandler.scala | 20 +-
 .../ScalaGeneratePropertyHandler.scala | 5 +-
 .../ScalaGenerateToStringHandler.scala | 10 +-
 .../booleans/DeMorganLawIntention.scala | 5 +-
 .../booleans/ExpandBooleanIntention.scala | 4 +-
 .../FlipComparisonInInfixExprIntention.scala | 5 +-
 ...pComparisonInMethodCallExprIntention.scala | 9 +-
 .../booleans/NegateComparisonIntention.scala | 5 +-
 ...EqualsOrEqualityInInfixExprIntention.scala | 5 +-
 ...sOrEqualityInMethodCallExprIntention.scala | 5 +-
 .../ConvertToCurlyBracesIntention.scala | 4 +-
 .../InvertIfConditionIntention.scala | 4 +-
 .../controlflow/MergeElseIfIntention.scala | 4 +-
 .../controlflow/MergeIfToAndIntention.scala | 4 +-
 .../controlflow/MergeIfToOrIntention.scala | 10 +-
 .../RemoveRedundantElseIntention.scala | 4 +-
 .../ReplaceDoWhileWithWhileIntention.scala | 10 +-
 .../ReplaceWhileWithDoWhileIntention.scala | 4 +-
 .../controlflow/SplitElseIfIntention.scala | 4 +-
 .../controlflow/SplitIfIntention.scala | 4 +-
 .../ConvertToInfixExpressionIntention.scala | 3 +-
 .../IntroduceExplicitParameterIntention.scala | 5 +-
 .../IntroduceImplicitParameterIntention.scala | 5 +-
 .../expression/RemoveApplyIntention.scala | 5 +-
 ...emoveUnnecessaryParenthesesIntention.scala | 10 +-
 .../AbstractFormatConversionIntention.scala | 4 +-
 ...tFormattedStringToInterpolatedString.scala | 5 +-
 ...ingConcatenationToInterpolatedString.scala | 5 +-
 .../intention/imports/ImportMembersUtil.scala | 10 +-
 .../InsertGapIntoStringIntention.scala | 7 +-
 .../StringToMultilineStringIntention.scala | 15 +-
 .../ConvertToTypedPatternIntention.scala | 5 +-
 .../matcher/CreateCaseClausesIntention.scala | 15 +-
 .../matcher/ExpandPatternIntention.scala | 5 +-
 .../types/ConvertFromInfixIntention.scala | 5 +-
 ...ertImplicitBoundsToImplicitParameter.scala | 22 +-
 .../types/ConvertToInfixIntention.scala | 17 +-
 .../types/MakeTypeMoreSpecificIntention.scala | 5 +-
 .../types/ToggleTypeAnnotation.scala | 4 +-
 .../intention/types/UpdateStrategy.scala | 15 +-
 .../codeInsight/template/util/MacroUtil.scala | 6 +-
 .../codeInspection/ReplaceQuickFix.scala | 5 +-
 .../booleans/SimplifyBooleanInspection.scala | 4 +-
 .../cast/ScalaRedundantCastInspection.scala | 5 +-
 .../ScalaRedundantConversionInspection.scala | 4 +-
 .../collections/MapGetOrElseInspection.scala | 9 +-
 .../OperationOnCollectionQuickFix.scala | 5 +-
 .../ScalaUselessExpressionInspection.scala | 5 +-
 .../ConvertibleToMethodValueInspection.scala | 5 +-
 .../MatchToPartialFunctionInspection.scala | 9 +-
 .../RemoveBracesForSingleImportQuickFix.scala | 5 +-
 ...eralEndingWithDecimalPointInspection.scala | 14 +-
 ...ssorLikeMethodIsEmptyParenInspection.scala | 5 +-
 ...rLikeMethodIsParameterlessInspection.scala | 5 +-
 ...ethodDefinedWithEqualsSignInspection.scala | 5 +-
 .../UnitMethodIsParameterlessInspection.scala | 5 +-
 .../quickfix/InsertReturnTypeAndEquals.scala | 7 +-
 .../ScalaMoveToPackageQuickFix.scala | 6 +-
 ...UnnecessaryParenthesesInspectionBase.scala | 5 +-
 .../ScalaUnnecessarySemicolonInspection.scala | 5 +-
 .../VariablePatternShadowInspection.scala | 5 +-
 .../ScalaSuppressableInspectionTool.scala | 5 +-
 .../ComparingUnrelatedTypesInspection.scala | 6 +-
 .../TypeCheckCanBeMatchInspection.scala | 13 +-
 ...ctorSimplifyTypeProjectionInspection.scala | 4 +-
 ...torUseCorrectLambdaKeywordInspection.scala | 5 +-
 .../ScalaUnusedImportPass.scala | 5 +-
 .../ScalaUnusedImportPassBase.scala | 5 +-
 .../ScalaUnusedSymbolPass.scala | 14 +-
 .../scala/compiler/CompileServerManager.scala | 7 +-
 .../scala/compiler/RemoteServerRunner.scala | 5 +-
 .../components/HighlightingAdvisor.scala | 13 +-
 .../console/ScalaConsoleExecuteAction.scala | 4 +-
 .../scala/console/ScalaLanguageConsole.scala | 20 +-
 .../SendSelectionToConsoleAction.scala | 4 +-
 .../scala/conversion/JavaToScala.scala | 4 +-
 .../copy/JavaCopyPastePostProcessor.scala | 6 +-
 .../scala/debugger/LocationLineManager.scala | 10 +-
 .../ScalaFrameExtraVariablesProvider.scala | 11 +-
 .../evaluation/ScalaCodeFragment.scala | 6 +-
 .../ScalaEvaluatorBuilderUtil.scala | 63 +-
 .../evaluator/ScalaEqEvaluator.scala | 10 +-
 .../evaluator/ScalaInstanceofEvaluator.scala | 5 +-
 .../evaluator/ScalaMethodEvaluator.scala | 24 +-
 .../ScalaSmartStepIntoHandler.scala | 5 +-
 .../ui/NonStrictCollectionsRenderer.scala | 8 +-
 .../debugger/ui/ScalaCollectionRenderer.scala | 10 +-
 .../ScalaBackspaceHandler.scala | 8 +-
 .../CreateScalaDocStubAction.scala | 5 +-
 .../ScalaDocumentationProvider.scala | 8 +-
 .../MultilineStringEnterHandler.scala | 14 +-
 .../ScalaImportOptimizer.scala | 11 +-
 .../ScalaCodeBlockSelectioner.scala | 5 +-
 .../plugins/scala/extensions/package.scala | 6 +-
 .../NonMemberMethodUsagesSearcher.scala | 7 +-
 .../factory/ScalaFindUsagesHandler.scala | 5 +-
 ...orParamsInConstructorPatternSearcher.scala | 3 +-
 .../ScalaHighlightUsagesHandlerFactory.scala | 11 +-
 ...calaInjectedStringLiteralManipulator.scala | 5 +-
 .../plugins/scala/lang/TokenSets.scala | 4 +-
 .../SameSignatureCallParametersProvider.scala | 3 +-
 .../ScalaAfterNewCompletionUtil.scala | 5 +-
 .../ScalaAotCompletionContributor.scala | 16 +-
 .../ScalaClassNameCompletionContributor.scala | 19 +-
 .../completion/ScalaOverrideContributor.scala | 10 +-
 .../ScalaSmartCompletionContributor.scala | 14 +-
 .../filters/expression/YieldFilter.scala | 10 +-
 .../ScalaConstructorInsertHandler.scala | 10 +-
 .../handlers/ScalaInsertHandler.scala | 15 +-
 .../lookups/LookupElementManager.scala | 3 +-
 .../completion/lookups/ScalaLookupItem.scala | 11 +-
 .../ScalaPostfixTemplatePsiInfo.scala | 5 +-
 .../processors/ScalaSpacingProcessor.scala | 28 +-
 .../lang/parser/parsing/CommonUtils.scala | 5 +-
 .../lang/parser/parsing/CompilationUnit.scala | 3 +-
 .../parser/parsing/base/ImportSelectors.scala | 3 +-
 .../scala/lang/psi/PresentationUtil.scala | 5 +-
 .../scala/lang/psi/ScImportsHolder.scala | 16 +-
 .../plugins/scala/lang/psi/TypeAdjuster.scala | 8 +-
 .../lang/psi/api/FileDeclarationsHolder.scala | 12 +-
 .../scala/lang/psi/api/InferUtil.scala | 13 +-
 .../lang/psi/api/ScControlFlowOwner.scala | 5 +-
 .../psi/api/base/ScPrimaryConstructor.scala | 5 +-
 .../psi/api/base/ScReferenceElement.scala | 5 +-
 .../psi/api/base/patterns/ScPattern.scala | 6 +-
 .../lang/psi/api/expr/ScAnnotations.scala | 3 +-
 .../scala/lang/psi/api/expr/ScBlock.scala | 6 +-
 .../lang/psi/api/expr/ScExpression.scala | 19 +-
 .../api/expr/ScModificationTrackerOwner.scala | 5 +-
 .../lang/psi/api/statements/ScFunction.scala | 14 +-
 .../psi/api/toplevel/ScNamedElement.scala | 28 +-
 .../psi/api/toplevel/typedef/ScMember.scala | 6 +-
 .../toplevel/typedef/ScTypeDefinition.scala | 5 +-
 .../scala/lang/psi/impl/ScPackageImpl.scala | 9 +-
 .../psi/impl/ScalaPsiElementFactory.scala | 13 +-
 .../scala/lang/psi/impl/ScalaPsiManager.scala | 4 +-
 .../psi/impl/base/ScModifierListImpl.scala | 23 +-
 .../ScStableCodeReferenceElementImpl.scala | 21 +-
 .../types/ScFunctionalTypeElementImpl.scala | 6 +-
 .../base/types/ScInfixTypeElementImpl.scala | 6 +-
 .../ScParameterizedTypeElementImpl.scala | 22 +-
 .../base/types/ScSimpleTypeElementImpl.scala | 15 +-
 .../base/types/ScTupleTypeElementImpl.scala | 6 +-
 .../lang/psi/impl/expr/ScAssignStmtImpl.scala | 12 +-
 .../psi/impl/expr/ScConstrBlockImpl.scala | 5 +-
 .../lang/psi/impl/expr/ScConstrExprImpl.scala | 5 +-
 .../lang/psi/impl/expr/ScIfStmtImpl.scala | 4 +-
 .../lang/psi/impl/expr/ScInfixExprImpl.scala | 6 +-
 .../expr/ScNewTemplateDefinitionImpl.scala | 6 +-
 .../impl/expr/ScReferenceExpressionImpl.scala | 11 +-
 .../ScalaOverridingMemberSearcher.scala | 5 +-
 .../statements/ScFunctionDefinitionImpl.scala | 6 +-
 .../psi/impl/statements/ScFunctionImpl.scala | 6 +-
 .../statements/ScMacroDefinitionImpl.scala | 6 +-
 .../imports/ScImportSelectorImpl.scala | 5 +-
 .../toplevel/imports/ScImportStmtImpl.scala | 4 +-
 .../toplevel/packaging/ScPackagingImpl.scala | 12 +-
 .../templates/ScExtendsBlockImpl.scala | 6 +-
 .../impl/toplevel/typedef/ScClassImpl.scala | 18 +-
 .../impl/toplevel/typedef/ScObjectImpl.scala | 22 +-
 .../impl/toplevel/typedef/ScTraitImpl.scala | 13 +-
 .../typedef/SyntheticMembersInjector.scala | 15 +-
 .../typedef/TypeDefinitionMembers.scala | 10 +-
 .../psi/implicits/ImplicitCollector.scala | 6 +-
 .../implicits/ScImplicitlyConvertible.scala | 9 +-
 .../lang/psi/light/LightScalaMethod.scala | 5 +-
 .../lang/psi/light/PsiClassWrapper.scala | 5 +-
 .../psi/light/PsiTypedDefinitionWrapper.scala | 6 +-
 .../lang/psi/light/ScFunctionWrapper.scala | 10 +-
 .../StaticPsiTypedDefinitionWrapper.scala | 5 +-
 .../light/StaticTraitScFunctionWrapper.scala | 11 +-
 .../psi/stubs/impl/ScAnnotationStubImpl.scala | 6 +-
 .../psi/stubs/impl/ScTypeAliasStubImpl.scala | 18 +-
 .../scala/lang/psi/types/Compatibility.scala | 5 +-
 .../scala/lang/psi/types/Conformance.scala | 111 ++--
 .../lang/psi/types/ScExistentialType.scala | 14 +-
 .../scala/lang/psi/types/ScFunctionType.scala | 3 +-
 .../lang/psi/types/ScParameterizedType.scala | 11 +-
 .../lang/psi/types/ScProjectionType.scala | 24 +-
 .../lang/psi/types/ScTypePresentation.scala | 5 +-
 .../lang/psi/types/ScTypePsiTypeBridge.scala | 6 +-
 .../lang/psi/types/ScUndefinedType.scala | 5 +-
 .../psi/types/nonvalue/ScMethodType.scala | 11 +-
 .../rearranger/ScalaArrangementVisitor.scala | 5 +-
 .../scala/lang/rearranger/package.scala | 64 +-
 .../ScalaChangeSignatureDialog.scala | 10 +-
 .../ScalaChangeSignatureHandler.scala | 12 +-
 .../ScalaChangeSignatureUsageHandler.scala | 71 +-
 .../ScalaChangeSignatureUsageProcessor.scala | 11 +-
 .../changeSignature/ScalaParameterInfo.scala | 10 +-
 .../ScalaParameterTableModel.scala | 4 +-
 .../ScalaParameterTableModelItem.scala | 5 +-
 .../delete/SafeDeleteProcessorUtil.scala | 4 +-
 .../extractMethod/InnerClassSettings.scala | 6 +-
 .../ScalaExtractMethodHandler.scala | 14 +-
 .../ScalaExtractMethodUtils.scala | 18 +-
 .../extractTrait/ExtractSuperUtil.scala | 19 +-
 .../ScalaExtractTraitHandler.scala | 27 +-
 .../inline/ScalaInlineHandler.scala | 18 +-
 ...aIntroduceFieldFromExpressionHandler.scala | 12 +-
 .../ScalaIntroduceFieldHandlerBase.scala | 10 +-
 .../ScalaIntroduceParameterDialog.scala | 4 +-
 .../ScalaIntroduceParameterHandler.scala | 20 +-
 .../IntroduceExpressions.scala | 17 +-
 .../IntroduceTypeAlias.scala | 34 +-
 .../ScalaInplaceTypeAliasIntroducer.scala | 5 +-
 .../ScalaInplaceVariableIntroducer.scala | 9 +-
 .../ScalaIntroduceVariableHandler.scala | 7 +-
 .../introduceVariable/ScopeSuggester.scala | 7 +-
 .../ScalaMoveClassesOrPackagesHandler.scala | 9 +-
 .../rename/RenameLightProcessor.scala | 6 +-
 .../rename/RenameScalaClassProcessor.scala | 6 +-
 .../rename/RenameScalaMethodProcessor.scala | 6 +-
 .../RenameScalaTypeAliasProcessor.scala | 8 +-
 .../rename/RenameScalaVariableProcessor.scala | 20 +-
 .../refactoring/rename/ScalaRenameUtil.scala | 6 +-
 .../inplace/ScalaInplaceRenameHandler.scala | 6 +-
 .../inplace/ScalaLocalInplaceRenamer.scala | 24 +-
 .../inplace/ScalaMemberInplaceRenamer.scala | 15 +-
 .../ui/ScalaCodeFragmentTableCellEditor.scala | 5 +-
 .../util/InplaceRenameHelper.scala | 7 +-
 .../util/ScalaCompositeTypeValidator.scala | 8 +-
 .../util/ScalaDirectoryService.scala | 6 +-
 .../util/ScalaRefactoringUtil.scala | 73 ++-
 .../refactoring/util/ScalaTypeValidator.scala | 8 +-
 .../util/ScalaVariableValidator.scala | 9 +-
 .../ScalaReferenceContributor.scala | 5 +-
 .../ResolvableReferenceExpression.scala | 27 +-
 ...ResolvableStableCodeReferenceElement.scala | 20 +-
 .../scala/lang/resolve/ResolveUtils.scala | 30 +-
 .../lang/resolve/ScalaResolveResult.scala | 5 +-
 .../resolve/processor/BaseProcessor.scala | 7 +-
 .../ExpandedExtractorResolveProcessor.scala | 5 +-
 .../resolve/processor/MostSpecificUtil.scala | 5 +-
 .../parser/parsing/MyScaladocParsing.scala | 10 +-
 .../ScDocResolvableCodeReferenceImpl.scala | 10 +-
 .../impl/ScalaFileStructureViewElement.scala | 4 +-
 .../ScalaDocWithSyntaxSurrounder.scala | 3 +-
 .../ScalaGenerationInfo.scala | 4 +-
 .../ScalaImplementMethodsHandler.scala | 6 +-
 .../ScalaMethodImplementor.scala | 5 +-
 .../scala/overrideImplement/ScalaOIUtil.scala | 37 +-
 .../ScalaOverrideMethodsHandler.scala | 6 +-
 .../SetupScalaSdkNotificationProvider.scala | 5 +-
 .../source/ScalaEditorFileSwapper.scala | 8 +-
 .../settings/ScalaCompilerConfiguration.scala | 4 +-
 .../codeInspection/i18n/ScalaI18nUtil.scala | 9 +-
 .../ScalaInvalidPropertyKeyInspection.scala | 10 +-
 .../folding/ScalaPropertyFoldingBuilder.scala | 4 +-
 .../ScalaScriptConfugurationProducer.scala | 4 +-
 .../script/ScalaScriptRunConfiguration.scala | 4 +-
 .../settings/ScalaProjectSettingsUtil.scala | 4 +-
 .../ScLiteralExpressionTokenizer.scala | 12 +-
 .../testingSupport/ScalaTestGenerator.scala | 52 +-
 .../AbstractTestConfigurationProducer.scala | 5 +-
 .../AbstractTestRerunFailedTestsAction.scala | 3 +-
 .../test/AbstractTestRunConfiguration.scala | 17 +-
 .../ScalaTestConfigurationProducer.scala | 24 +-
 .../specs2/Specs2ConfigurationProducer.scala | 4 +-
 .../utest/UTestConfigurationProducer.scala | 9 +-
 .../test/utest/UTestRunConfiguration.scala | 5 +-
 .../plugins/scala/util/IntentionUtils.scala | 17 +-
 .../scala/util/MultilineStringUtil.scala | 5 +-
 .../scala/util/ScEquivalenceUtil.scala | 6 +-
 .../macroDebug/CleanMacrosheetAction.scala | 6 +-
 .../util/macroDebug/RunMacrosheetAction.scala | 6 +-
 .../macroDebug/ScalaMacroDebuggingUtil.scala | 5 +-
 .../actions/CleanWorksheetAction.scala | 6 +-
 .../actions/RunWorksheetAction.scala | 6 +-
 .../worksheet/actions/WorksheetFileHook.scala | 7 +-
 .../processor/WorksheetCompiler.scala | 3 +-
 .../processor/WorksheetPerFileConfig.scala | 5 +-
 .../worksheet/ui/WorksheetEditorPrinter.scala | 23 +-
 .../worksheet/ui/WorksheetFoldGroup.scala | 5 +-
 .../execution/SbtOrderEnumeratorHandler.scala | 4 +-
 .../jetbrains/sbt/language/SbtFileImpl.scala | 12 +-
 .../sbt/project/SbtProjectResolver.scala | 8 +-
 .../module/SbtModuleSettingsEditor.scala | 4 +-
 .../SbtNoImportNotificationProvider.scala | 14 +-
 .../SbtReImportNotificationProvider.scala | 14 +-
 .../ActivatorCachedRepoProcessor.scala | 5 +-
 .../jetbrains/sbt/resolvers/SbtResolver.scala | 5 +-
 .../resolvers/SbtResolverIndexesManager.scala | 5 +-
 .../sbt/runner/SbtRunConfiguration.scala | 8 +-
 .../plugins/hocon/HoconEditorActionTest.scala | 5 +-
 .../plugins/scala/LightScalaTestCase.scala | 4 +-
 .../InterpolatedStringsAnnotatorTest.scala | 4 +-
 .../annotator/PatternAnnotatorTest.scala | 15 +-
 .../PatternDefinitionAnnotatorTest.scala | 5 +-
 .../annotator/SingleAbstractMethodTest.scala | 4 +-
 .../VariableDefinitionAnnotatorTest.scala | 5 +-
 .../annotator/gutter/LineMarkerTestBase.scala | 4 +-
 .../ObjectCreationImpossibleTest.scala | 9 +-
 .../plugins/scala/base/LibraryTestCase.scala | 5 +-
 .../scala/base/ScalaFixtureTestCase.scala | 5 +-
 .../scala/base/ScalaLibraryLoader.scala | 4 +-
 ...laLightCodeInsightFixtureTestAdapter.scala | 24 +-
 .../plugins/scala/base/SimpleTestCase.scala | 4 +-
 .../debugger/ScalaDebuggerTestCase.scala | 30 +-
 .../ExactBreakpointTest.scala | 5 +-
 .../JavaHighlightingTest.scala | 4 +-
 .../IncrementalLexerHighlightingTest.scala | 9 +-
 .../lang/lexer/LexerPerformanceTest.scala | 29 +-
 .../scala/lang/resolve2/ResolveTestBase.scala | 8 +-
 .../HighlightingPerformanceTest.scala | 6 +-
 .../ChangeSignatureFromScalaTest.scala | 5 +-
 .../ExtractTraitTestConflicts.scala | 5 +-
 .../IntroduceParameterTestBase.scala | 5 +-
 .../nameSuggester/NameSuggesterTest.scala | 5 +-
 .../testingSupport/ScalaTestingTestCase.scala | 37 +-
 .../sbt/annotator/SbtAnnotatorTest.scala | 5 +-
 .../main/scala/kafka/admin/AclCommand.scala | 5 +-
 .../main/scala/kafka/admin/AdminUtils.scala | 6 +-
 .../scala/kafka/admin/ConfigCommand.scala | 12 +-
 .../kafka/admin/ConsumerGroupCommand.scala | 13 +-
 ...referredReplicaLeaderElectionCommand.scala | 5 +-
 .../admin/ReassignPartitionsCommand.scala | 16 +-
 .../main/scala/kafka/admin/TopicCommand.scala | 11 +-
 .../kafka/admin/ZkSecurityMigrator.scala | 6 +-
 .../main/scala/kafka/api/FetchResponse.scala | 6 +-
 .../main/scala/kafka/cluster/Partition.scala | 12 +-
 .../kafka/common/OffsetMetadataAndError.scala | 5 +-
 .../scala/kafka/consumer/ConsumerConfig.scala | 5 +-
 .../consumer/ConsumerFetcherManager.scala | 5 +-
 .../kafka/consumer/PartitionAssignor.scala | 6 +-
 .../consumer/ZookeeperConsumerConnector.scala | 50 +-
 .../controller/ControllerChannelManager.scala | 9 +-
 .../kafka/controller/KafkaController.scala | 50 +-
 .../controller/PartitionStateMachine.scala | 10 +-
 .../controller/ReplicaStateMachine.scala | 8 +-
 .../controller/TopicDeletionManager.scala | 12 +-
 .../kafka/coordinator/DelayedHeartbeat.scala | 6 +-
 .../kafka/coordinator/GroupCoordinator.scala | 4 +-
 .../kafka/coordinator/GroupMetadata.scala | 6 +-
 .../coordinator/GroupMetadataManager.scala | 10 +-
 .../core/src/main/scala/kafka/log/Log.scala | 10 +-
 .../src/main/scala/kafka/log/LogCleaner.scala | 25 +-
 .../src/main/scala/kafka/log/OffsetMap.scala | 5 +-
 .../scala/kafka/network/BlockingChannel.scala | 4 +-
 .../scala/kafka/network/RequestChannel.scala | 5 +-
 .../scala/kafka/network/SocketServer.scala | 6 +-
 .../kafka/producer/BrokerPartitionInfo.scala | 11 +-
 .../producer/async/DefaultEventHandler.scala | 5 +-
 .../main/scala/kafka/security/auth/Acl.scala | 7 +-
 .../security/auth/SimpleAclAuthorizer.scala | 5 +-
 .../kafka/server/ClientQuotaManager.scala | 5 +-
 .../scala/kafka/server/DelayedProduce.scala | 9 +-
 .../main/scala/kafka/server/KafkaApis.scala | 4 +-
 .../scala/kafka/server/KafkaHealthcheck.scala | 5 +-
 .../main/scala/kafka/server/KafkaServer.scala | 24 +-
 .../scala/kafka/server/ReplicaManager.scala | 14 +-
 .../scala/kafka/tools/ConsoleConsumer.scala | 4 +-
 .../scala/kafka/tools/ConsoleProducer.scala | 18 +-
 .../kafka/tools/ConsumerOffsetChecker.scala | 4 +-
 .../kafka/tools/ConsumerPerformance.scala | 20 +-
 .../scala/kafka/tools/DumpLogSegments.scala | 13 +-
 .../scala/kafka/tools/EndToEndLatency.scala | 10 +-
 .../scala/kafka/tools/ExportZkOffsets.scala | 11 +-
 .../scala/kafka/tools/GetOffsetShell.scala | 7 +-
 .../scala/kafka/tools/ImportZkOffsets.scala | 5 +-
 .../src/main/scala/kafka/tools/JmxTool.scala | 4 +-
 .../main/scala/kafka/tools/MirrorMaker.scala | 8 +-
 .../main/scala/kafka/tools/PerfConfig.scala | 5 +-
 .../kafka/tools/ProducerPerformance.scala | 12 +-
 .../scala/kafka/tools/ReplayLogProducer.scala | 10 +-
 .../kafka/tools/ReplicaVerificationTool.scala | 9 +-
 .../tools/SimpleConsumerPerformance.scala | 7 +-
 .../kafka/tools/SimpleConsumerShell.scala | 5 +-
 .../main/scala/kafka/utils/DelayedItem.scala | 4 +-
 .../src/main/scala/kafka/utils/ZkUtils.scala | 9 +-
 .../kafka/api/BaseProducerSendTest.scala | 12 +-
 .../kafka/api/EndToEndAuthorizationTest.scala | 5 +-
 .../kafka/api/PlaintextConsumerTest.scala | 18 +-
 .../kafka/api/ProducerBounceTest.scala | 21 +-
 .../api/ProducerFailureHandlingTest.scala | 20 +-
 .../integration/kafka/api/QuotasTest.scala | 23 +-
 .../api/RackAwareAutoTopicCreationTest.scala | 4 +-
 .../integration/kafka/api/SaslSetup.scala | 4 +-
 .../scala/kafka/tools/TestLogCleaning.scala | 17 +-
 .../scala/other/kafka/StressTestLog.scala | 20 +-
 .../other/kafka/TestLinearWriteSpeed.scala | 15 +-
 .../scala/other/kafka/TestOffsetManager.scala | 6 +-
 .../unit/kafka/admin/AclCommandTest.scala | 18 +-
 .../scala/unit/kafka/admin/AdminTest.scala | 6 +-
 .../kafka/admin/DeleteConsumerGroupTest.scala | 10 +-
 .../unit/kafka/admin/RackAwareTest.scala | 5 +-
 .../GroupCoordinatorResponseTest.scala | 6 +-
 .../UncleanLeaderElectionTest.scala | 4 +-
 .../kafka/log/LogCleanerIntegrationTest.scala | 8 +-
 .../test/scala/unit/kafka/log/LogTest.scala | 32 +-
 .../kafka/producer/AsyncProducerTest.scala | 8 +-
 .../auth/SimpleAclAuthorizerTest.scala | 35 +-
 .../unit/kafka/server/ISRExpirationTest.scala | 43 +-
 .../unit/kafka/server/KafkaConfigTest.scala | 12 +-
 .../unit/kafka/server/LogOffsetTest.scala | 14 +-
 .../unit/kafka/server/LogRecoveryTest.scala | 12 +-
 .../unit/kafka/server/SimpleFetchTest.scala | 8 +-
 .../kafka/utils/ReplicationUtilsTest.scala | 10 +-
 .../unit/kafka/utils/SchedulerTest.scala | 17 +-
 .../scala/unit/kafka/utils/TestUtils.scala | 25 +-
 repos/lila/app/controllers/Opening.scala | 6 +-
 repos/lila/app/controllers/Puzzle.scala | 6 +-
 repos/lila/app/controllers/QaQuestion.scala | 7 +-
 repos/lila/app/controllers/Round.scala | 6 +-
 repos/lila/app/controllers/Team.scala | 7 +-
 repos/lila/app/controllers/User.scala | 8 +-
 repos/lila/modules/db/src/main/Tube.scala | 4 +-
 .../modules/fishnet/src/main/UciToPgn.scala | 3 +-
 .../modules/forum/src/main/TopicApi.scala | 10 +-
 .../lila/modules/game/src/main/Captcher.scala | 3 +-
 repos/lila/modules/game/src/main/Event.scala | 4 +-
 .../modules/insight/src/main/Storage.scala | 3 +-
 repos/lila/modules/lobby/src/main/Lobby.scala | 6 +-
 .../lila/modules/lobby/src/main/Socket.scala | 7 +-
 repos/lila/modules/message/src/main/Api.scala | 5 +-
 repos/lila/modules/mod/src/main/ModApi.scala | 6 +-
 .../modules/puzzle/src/main/PuzzleApi.scala | 15 +-
 repos/lila/modules/qa/src/main/QaApi.scala | 8 +-
 .../modules/round/src/main/MoveMonitor.scala | 6 +-
 .../lila/modules/round/src/main/Player.scala | 7 +-
 .../modules/round/src/main/Rematcher.scala | 6 +-
 .../lila/modules/round/src/main/Socket.scala | 4 +-
 .../modules/round/src/main/StepBuilder.scala | 32 +-
 .../modules/security/src/main/Firewall.scala | 5 +-
 repos/lila/modules/site/src/main/Env.scala | 5 +-
 .../modules/socket/src/main/Handler.scala | 4 +-
 .../lila/modules/team/src/main/TeamApi.scala | 3 +-
 .../tournament/src/main/ApiActor.scala | 6 +-
 .../tournament/src/main/JsonView.scala | 9 +-
 .../tournament/src/main/PlayerRepo.scala | 3 +-
 .../tournament/src/main/TournamentApi.scala | 9 +-
 repos/lila/modules/tv/src/main/Tv.scala | 3 +-
 repos/lila/modules/user/src/main/Env.scala | 6 +-
 .../mesos/simulation/DriverActor.scala | 4 +-
 .../marathon/MarathonSchedulerActor.scala | 5 +-
 .../marathon/MarathonSchedulerService.scala | 22 +-
 .../marathon/SchedulerDriverFactory.scala | 6 +-
 .../mesosphere/marathon/api/CORSFilter.scala | 4 +-
 .../marathon/core/flow/FlowModule.scala | 4 +-
 .../marathon/core/launcher/TaskOp.scala | 4 +-
 .../launcher/impl/TaskOpFactoryHelper.scala | 5 +-
 .../impl/AppTaskLauncherActor.scala | 4 +-
 .../launchqueue/impl/RateLimiterActor.scala | 6 +-
 .../impl/OfferMatcherManagerActor.scala | 17 +-
 .../core/task/jobs/TaskJobsModule.scala | 6 +-
 .../core/task/tracker/TaskTrackerModule.scala | 11 +-
 .../health/HealthCheckWorkerActor.scala | 5 +-
 .../marathon/state/GroupManager.scala | 5 +-
 .../mesosphere/marathon/state/Migration.scala | 5 +-
 .../marathon/upgrade/DeploymentActor.scala | 6 +-
 .../mesosphere/mesos/ResourceMatcher.scala | 5 +-
 .../util/CapConcurrentExecutions.scala | 11 +-
 .../mesosphere/marathon/DebugConfTest.scala | 5 +-
 .../marathon/MarathonTestHelper.scala | 5 +-
 .../api/v2/AppTasksResourceTest.scala | 7 +-
 .../marathon/api/v2/AppsResourceTest.scala | 7 +-
 .../marathon/api/v2/GroupsResourceTest.scala | 19 +-
 .../marathon/api/v2/QueueResourceTest.scala | 7 +-
 .../marathon/api/v2/TasksResourceTest.scala | 12 +-
 .../api/v2/json/AppDefinitionTest.scala | 616 +++++++++---------
 .../core/appinfo/TaskStatsByVersionTest.scala | 5 +-
 .../impl/OfferProcessorImplTest.scala | 3 +-
 .../impl/AppTaskLauncherActorTest.scala | 42 +-
 .../impl/TaskOpProcessorImplTest.scala | 6 +-
 .../tracker/impl/TaskTrackerActorTest.scala | 14 +-
 .../TaskStatusUpdateProcessorImplTest.scala | 4 +-
 .../GroupDeployIntegrationTest.scala | 20 +-
 .../marathon/tasks/ResourceUtilTest.scala | 14 +-
 .../tasks/TaskOpFactoryImplTest.scala | 4 +-
 .../upgrade/DeploymentActorTest.scala | 35 +-
 .../upgrade/DeploymentPlanRevertTest.scala | 5 +-
 .../upgrade/GroupVersioningUtilTest.scala | 18 +-
 .../marathon/upgrade/TaskStartActorTest.scala | 5 +-
 .../mesosphere/mesos/ConstraintsTest.scala | 72 +-
 .../mesos/ResourceMatcherTest.scala | 15 +-
 .../generator/WillRobinsonPickling.scala | 12 +-
 .../scala/pickling/run/externalizable.scala | 6 +-
 .../scala/pickling/run/wrapped-array.scala | 3 +-
 .../accounts/AccountServiceHandlers.scala | 3 +-
 .../precog/accounts/MongoAccountsServer.scala | 8 +-
 .../precog/auth/MongoAPIKeyManagerSpec.scala | 6 +-
 .../com/precog/auth/SecurityServiceSpec.scala | 24 +-
 .../precog/bifrost/ManagedQueryModule.scala | 5 +-
 .../bifrost/nihdb/NIHDBQueryExecutor.scala | 5 +-
 .../service/ShardServiceCombinators.scala | 4 +-
 .../bifrost/ManagedQueryExecutorSpec.scala | 5 +-
 .../com/precog/bytecode/StaticLibrary.scala | 8 +-
 .../precog/common/ingest/IngestMessage.scala | 9 +-
 .../precog/common/security/APIKeyFinder.scala | 3 +-
 .../common/security/APIKeyManagerSpec.scala | 6 +-
 .../common/util/ArbitraryEventMessage.scala | 4 +-
 .../precog/dvergr/JobServiceHandlers.scala | 4 +-
 .../com/precog/ingest/EventIdSequence.scala | 12 +-
 .../precog/ingest/kafka/KafkaRelayAgent.scala | 10 +-
 .../ingest/service/IngestServiceHandler.scala | 18 +-
 .../ingest/util/DirectIngestBenchmark.scala | 4 +-
 .../yggdrasil/jdbc/JDBCPlatformSpecs.scala | 6 +-
 .../scala/com/precog/mimir/ArrayLib.scala | 5 +-
 .../scala/com/precog/mimir/Clustering.scala | 6 +-
 .../scala/com/precog/mimir/Evaluator.scala | 14 +-
 .../com/precog/mimir/ArrayLibSpecs.scala | 66 +-
 .../com/precog/mirror/EvaluatorSpecs.scala | 5 +-
 .../main/scala/com/precog/niflheim/Chef.scala | 5 +-
 .../com/precog/niflheim/NIHDBActor.scala | 3 +-
 .../scala/com/precog/niflheim/Segment.scala | 7 +-
 .../com/precog/niflheim/StorageReader.scala | 5 +-
 .../scala/com/precog/quirrel/Phases.scala | 4 +-
 .../com/precog/quirrel/typer/Binder.scala | 3 +-
 .../ragnarok/EvaluatingPerfTestRunner.scala | 5 +-
 .../main/scala/com/precog/util/MapUtils.scala | 4 +-
 .../actor/KafkaShardIngestActor.scala | 8 +-
 .../jdbm3/JDBMRawSortProjection.scala | 6 +-
 .../yggdrasil/nihdb/NIHDBProjection.scala | 5 +-
 .../table/BlockStoreColumnarTableModule.scala | 10 +-
 .../com/precog/yggdrasil/vfs/ActorVFS.scala | 4 +-
 .../com/precog/yggdrasil/vfs/SecureVFS.scala | 5 +-
 .../com/precog/yggdrasil/vfs/VersionLog.scala | 6 +-
 .../util/IdSourceScannerModuleSpec.scala | 6 +-
 .../scalaguide/binder/models/AgeRange.scala | 5 +-
 .../scalaGuide/main/akka/code/ScalaAkka.scala | 6 +-
 .../main/async/code/ScalaWebSockets.scala | 15 +-
 .../code/CompileTimeDependencyInjection.scala | 10 +-
 .../main/forms/code/ScalaForms.scala | 5 +-
 .../main/http/code/ScalaResults.scala | 5 +-
 .../framework/project/Tasks.scala | 5 +-
 .../src/main/scala/play/forkrun/ForkRun.scala | 12 +-
 .../main/scala/play/forkrun/SbtClient.scala | 17 +-
 .../api/libs/iteratee/ConcurrentSpec.scala | 7 +-
 .../api/libs/iteratee/EnumerateesSpec.scala | 21 +-
 .../core/server/akkahttp/AkkaHttpServer.scala | 10 +-
 .../main/scala/play/docs/DocServerStart.scala | 5 +-
 .../main/scala/play/filters/csrf/csrf.scala | 4 +-
 .../main/scala/play/filters/gzip/Gzip.scala | 15 +-
 .../play/filters/gzip/GzipFilterSpec.scala | 5 +-
 .../it/ServerIntegrationSpecification.scala | 6 +-
 .../scala/play/it/http/BasicHttpClient.scala | 10 +-
 .../it/http/websocket/WebSocketClient.scala | 6 +-
 .../scala/play/libs/oauth/OAuthSpec.scala | 21 +-
 .../play/core/TemplateMagicForJava.scala | 5 +-
 .../play/api/db/evolutions/Evolutions.scala | 5 +-
 .../api/db/evolutions/EvolutionsSpec.scala | 8 +-
 .../scala/play/api/db/DatabaseConfig.scala | 5 +-
 .../scala/play/api/libs/json/ReadsSpec.scala | 5 +-
 .../server/netty/NettyModelConversion.scala | 5 +-
 .../server/netty/PlayRequestHandler.scala | 5 +-
 .../play/core/server/DevServerStart.scala | 5 +-
 .../play/core/server/ProdServerStart.scala | 11 +-
 .../server/ssl/CertificateGenerator.scala | 4 +-
 .../play/core/server/ssl/FakeKeyStore.scala | 8 +-
 .../core/server/ssl/ServerSSLEngine.scala | 4 +-
 .../streams/impl/IterateeSubscriberSpec.scala | 3 +-
 .../libs/ws/ssl/CompositeX509KeyManager.scala | 10 +-
 .../api/libs/ws/ssl/SystemConfiguration.scala | 8 +-
 .../scala/play/api/libs/oauth/OAuthSpec.scala | 21 +-
 .../play/api/libs/openid/OpenIDSpec.scala | 21 +-
 .../ws/ssl/CompositeX509KeyManagerSpec.scala | 6 +-
 .../ssl/CompositeX509TrustManagerSpec.scala | 15 +-
 .../main/scala/play/api/Configuration.scala | 5 +-
 .../main/scala/play/api/GlobalSettings.scala | 5 +-
 .../play/src/main/scala/play/api/Play.scala | 4 +-
 .../scala/play/api/controllers/Assets.scala | 4 +-
 .../play/api/http/HttpErrorHandler.scala | 10 +-
 .../src/main/scala/play/api/libs/Files.scala | 5 +-
 .../play/core/formatters/Multipart.scala | 6 +-
 .../scala/play/core/parsers/Multipart.scala | 7 +-
 .../play/core/routing/GeneratedRouter.scala | 5 +-
 .../api/libs/crypto/AESCTRCrypterSpec.scala | 6 +-
 .../test/scala/play/api/mvc/CookiesSpec.scala | 5 +-
 .../routes/compiler/templates/package.scala | 5 +-
 .../play/runsupport/FileWatchService.scala | 7 +-
 .../main/scala/play/runsupport/Reloader.scala | 6 +-
 .../play/runsupport/FilterArgsSpec.scala | 6 +-
 .../play/sbt/routes/RoutesCompiler.scala | 5 +-
 .../src/main/scala/org/saddle/Mat.scala | 6 +-
 .../main/scala/org/saddle/array/Sorter.scala | 5 +-
 .../scala/org/saddle/io/CsvImplicits.scala | 12 +-
 .../main/scala/org/saddle/time/RRule.scala | 18 +-
 .../scala/org/saddle/io/H5StoreSpec.scala | 10 +-
 .../actions/src/main/scala/sbt/Compiler.scala | 5 +-
 .../actions/src/main/scala/sbt/DotGraph.scala | 3 +-
 .../actions/src/main/scala/sbt/Tests.scala | 8 +-
 .../command/src/main/scala/sbt/Command.scala | 3 +-
 .../src/main/scala/sbt/std/SettingMacro.scala | 4 +-
 .../main/src/main/scala/sbt/Aggregation.scala | 3 +-
 .../main/src/main/scala/sbt/Extracted.scala | 7 +-
 .../src/main/scala/sbt/GlobalPlugin.scala | 5 +-
 repos/sbt/main/src/main/scala/sbt/Load.scala | 14 +-
 repos/sbt/main/src/main/scala/sbt/Main.scala | 6 +-
 .../sbt/main/src/main/scala/sbt/Resolve.scala | 3 +-
 .../main/scala/sbt/SettingCompletions.scala | 9 +-
 .../mavenint/MavenRepositoryResolver.scala | 5 +-
 .../MavenRepositorySystemFactory.scala | 4 +-
 .../org/scalajs/core/compiler/GenJSCode.scala | 4 +-
 .../scalajs/core/compiler/GenJSExports.scala | 4 +-
 .../core/compiler/PreTyperComponent.scala | 6 +-
 .../scalajs/core/compiler/PrepJSInterop.scala | 16 +-
 .../src/main/scala/java/math/BigInteger.scala | 14 +-
 .../src/main/scala/java/math/Elementary.scala | 5 +-
 .../main/scala/java/nio/GenHeapBuffer.scala | 7 +-
 .../scala/java/nio/charset/CoderResult.scala | 4 +-
 .../scalajs/jsenv/LinkingUnitAsyncJSEnv.scala | 6 +-
 .../scalajs/jsenv/LinkingUnitComJSEnv.scala | 6 +-
 .../org/scalajs/jsenv/LinkingUnitJSEnv.scala | 6 +-
 .../scalajs/js/typedarray/DataViewExt.scala | 10 +-
 .../partest/scalajs/ScalaJSPartest.scala | 5 +-
 .../linker/backend/emitter/JSDesugaring.scala | 3 +-
 .../frontend/optimizer/GenIncOptimizer.scala | 4 +-
 repos/scala/src/build/genprod.scala | 11 +-
 .../compiler/scala/reflect/reify/Errors.scala | 5 +-
 .../reflect/reify/codegen/GenUtils.scala | 5 +-
 .../reflect/reify/phases/Metalevels.scala | 6 +-
 .../src/compiler/scala/tools/ant/Scalac.scala | 4 +-
 .../src/compiler/scala/tools/nsc/Global.scala | 5 +-
 .../scala/tools/nsc/ast/TreeGen.scala | 3 +-
 .../scala/tools/nsc/ast/parser/Parsers.scala | 6 +-
 .../tools/nsc/backend/ScalaPrimitives.scala | 4 +-
 .../tools/nsc/backend/jvm/AsmUtils.scala | 5 +-
 .../tools/nsc/backend/jvm/BCodeHelpers.scala | 8 +-
 .../nsc/backend/jvm/BCodeIdiomatic.scala | 6 +-
 .../nsc/backend/jvm/BCodeSkelBuilder.scala | 4 +-
 .../nsc/backend/jvm/BackendReporting.scala | 17 +-
 .../tools/nsc/backend/jvm/GenBCode.scala | 6 +-
 .../backend/jvm/analysis/AliasingFrame.scala | 3 +-
 .../tools/nsc/backend/jvm/opt/BoxUnbox.scala | 10 +-
 .../tools/nsc/backend/jvm/opt/CallGraph.scala | 5 +-
 .../tools/nsc/backend/jvm/opt/Inliner.scala | 8 +-
 .../scala/tools/nsc/classpath/FileUtils.scala | 3 +-
 .../ZipAndJarFileLookupFactory.scala | 4 +-
 .../scala/tools/nsc/plugins/Plugins.scala | 3 +-
 .../symtab/classfile/ClassfileParser.scala | 20 +-
 .../tools/nsc/symtab/classfile/Pickler.scala | 7 +-
 .../scala/tools/nsc/transform/CleanUp.scala | 3 +-
 .../tools/nsc/transform/Constructors.scala | 7 +-
 .../tools/nsc/transform/Delambdafy.scala | 14 +-
 .../scala/tools/nsc/transform/Erasure.scala | 11 +-
 .../nsc/transform/ExtensionMethods.scala | 3 +-
 .../scala/tools/nsc/transform/Mixin.scala | 9 +-
 .../tools/nsc/transform/OverridingPairs.scala | 3 +-
 .../tools/nsc/transform/SpecializeTypes.scala | 15 +-
 .../nsc/transform/patmat/MatchCodeGen.scala | 10 +-
 .../tools/nsc/typechecker/ContextErrors.scala | 7 +-
 .../tools/nsc/typechecker/Implicits.scala | 13 +-
 .../nsc/typechecker/MethodSynthesis.scala | 3 +-
 .../scala/tools/nsc/typechecker/Namers.scala | 5 +-
 .../tools/nsc/typechecker/PatternTypers.scala | 10 +-
 .../tools/nsc/typechecker/RefChecks.scala | 19 +-
 .../nsc/typechecker/SuperAccessors.scala | 5 +-
 .../nsc/typechecker/SyntheticMethods.scala | 5 +-
 .../tools/nsc/typechecker/TreeCheckers.scala | 5 +-
 .../nsc/typechecker/TypeDiagnostics.scala | 15 +-
 .../scala/tools/nsc/typechecker/Typers.scala | 59 +-
 .../tools/reflect/FormatInterpolator.scala | 3 +-
 .../nsc/interactive/CompilerControl.scala | 6 +-
 .../scala/tools/nsc/interactive/Global.scala | 7 +-
 .../scala/tools/nsc/interactive/REPL.scala | 5 +-
 .../scala/collection/concurrent/TrieMap.scala | 17 +-
 .../generic/GenTraversableFactory.scala | 6 +-
 .../scala/collection/immutable/Range.scala | 6 +-
 .../collection/parallel/ParSeqLike.scala | 5 +-
 repos/scala/src/library/scala/io/Source.scala | 8 +-
 .../library/scala/runtime/Tuple3Zipped.scala | 5 +-
 .../scala/tools/partest/ReplTest.scala | 6 +-
 .../scala/reflect/internal/Definitions.scala | 5 +-
 .../scala/reflect/internal/Importers.scala | 6 +-
 .../scala/reflect/internal/Trees.scala | 30 +-
 .../scala/reflect/internal/Types.scala | 3 +-
 .../internal/tpe/TypeConstraints.scala | 8 +-
 .../util/AbstractFileClassLoader.scala | 11 +-
 .../src/reflect/scala/reflect/io/File.scala | 5 +-
 .../scala/reflect/runtime/JavaMirrors.scala | 17 +-
 .../reflect/runtime/ReflectionUtils.scala | 7 +-
 .../scala/tools/nsc/MainGenericRunner.scala | 15 +-
 .../scala/tools/nsc/interpreter/IMain.scala | 6 +-
 .../nsc/interpreter/MemberHandlers.scala | 5 +-
 .../scaladoc/scala/tools/nsc/ScalaDoc.scala | 4 +-
 .../nsc/doc/base/CommentFactoryBase.scala | 4 +-
 .../tools/nsc/doc/model/ModelFactory.scala | 3 +-
 .../model/ModelFactoryImplicitSupport.scala | 6 +-
 .../scalap/scala/tools/scalap/Arguments.scala | 4 +-
 .../src/scalap/scala/tools/scalap/Main.scala | 5 +-
 .../akka/src/akka/AkkaException.scala | 6 +-
 .../akka/src/akka/actor/ActorRegistry.scala | 3 +-
 .../scala/test/files/run/Course-2002-03.scala | 4 +-
 .../scala/test/files/run/Course-2002-10.scala | 4 +-
 .../test/files/run/ReplacementMatching.scala | 6 +-
 .../test/files/run/typealias_overriding.scala | 3 +-
 .../ParallelSeqCheck.scala | 17 +-
 .../collection/immutable/StringLikeTest.scala | 12 +-
 .../collection/mutable/ArrayBufferTest.scala | 4 +-
 .../test/junit/scala/util/SortingTest.scala | 4 +-
 .../run/delambdafy-lambdametafactory.scala | 10 +-
 repos/scala/test/pending/run/t2364.scala | 5 +-
 .../scalafx/colorselector/ColorSelector.scala | 4 +-
 .../scalafx/colorselector/Formatter.scala | 5 +-
 .../scala/scalafx/collections/package.scala | 5 +-
 .../transformation/FilteredBuffer.scala | 9 +-
 .../main/scala/scalafx/print/Printer.scala | 8 +-
 .../scalafx/scene/control/TreeTableView.scala | 6 +-
 .../scala/scalafx/scene/layout/GridPane.scala | 7 +-
 .../scala/scalafx/scene/paint/Color.scala | 6 +-
 .../scalafx/scene/transform/Transform.scala | 14 +-
 .../scalafx/animation/InterpolatorSpec.scala | 6 +-
 .../testutil/SFXEnumDelegateSpec.scala | 8 +-
 .../scalafx/util/StringConverterSpec.scala | 5 +-
 .../cpr/ScalatraBroadcasterFactory.scala | 5 +-
 .../ScalatraAtmosphereHandler.scala | 65 +-
 .../commands/JacksonJsonParsing.scala | 10 +-
 .../scalatra/commands/NativeJsonParsing.scala | 10 +-
 .../scala/org/scalatra/commands/binding.scala | 7 +-
 .../scala/org/scalatra/commands/field.scala | 17 +-
 .../scala/org/scalatra/RouteRegistry.scala | 4 +-
 .../scala/org/scalatra/ScalatraFilter.scala | 4 +-
 .../scala/org/scalatra/util/FileCharset.scala | 4 +-
 .../scala/org/scalatra/CorsSupportSpec.scala | 8 +-
 .../scala/org/scalatra/UrlSupportTest.scala | 5 +-
 .../scala/org/scalatra/BasicAuthExample.scala | 10 +-
 .../org/scalatra/FileUploadExample.scala | 5 +-
 .../scalate/ScalateRenderSupport.scala | 4 +-
 .../swagger/SwaggerCommandSupportSpec.scala | 5 +-
 .../scalatra/swagger/reflect/Reflector.scala | 6 +-
 .../swagger/reflect/descriptors.scala | 10 +-
 .../org/scalatra/test/JettyContainer.scala | 10 +-
 .../src/main/scala/scalaz/std/Either.scala | 8 +-
 .../tests/src/test/scala/scalaz/MapTest.scala | 8 +-
 .../src/test/scala/scalaz/TreeTest.scala | 5 +-
 .../test/scala/scalaz/std/java/TimeTest.scala | 5 +-
 .../commons/source/LzoGenericScheme.scala | 10 +-
 .../source/VersionedKeyValSource.scala | 6 +-
 .../com/twitter/scalding/ExecutionApp.scala | 6 +-
 .../com/twitter/scalding/IterableSource.scala | 5 +-
 .../scala/com/twitter/scalding/JobTest.scala | 3 +-
 .../scala/com/twitter/scalding/Mode.scala | 12 +-
 .../macros/impl/FieldsProviderImpl.scala | 5 +-
 .../mathematics/TypedSimilarity.scala | 10 +-
 .../scalding/typed/PartitionSchemed.scala | 4 +-
 .../scalding/typed/PartitionedTextLine.scala | 9 +-
 .../com/twitter/scalding/ExecutionTest.scala | 5 +-
 .../scala/com/twitter/scalding/KryoTest.scala | 4 +-
 .../twitter/scalding/StringUtilityTest.scala | 4 +-
 .../scheme/TypedParquetTupleScheme.scala | 11 +-
 .../StringOrderedSerialization.scala | 4 +-
 .../UnsignedComparisonLaws.scala | 4 +-
 .../ScroogeOrderedBuf.scala | 5 +-
 repos/scaloid/project/StringUtils.scala | 3 +-
 .../codegen/GenerateRoundtripSources.scala | 5 +-
 .../testkit/tests/ModelBuilderTest.scala | 4 +-
 .../testkit/util/SimpleParentRunner.scala | 4 +-
 .../slick/benchmark/StreamsStressTest.scala | 6 +-
 .../test/codegen/CodeGeneratorAllTest.scala | 4 +-
 .../scala/slick/compiler/ExpandSums.scala | 4 +-
 .../scala/slick/compiler/HoistClientOps.scala | 8 +-
 .../compiler/MergeToComprehensions.scala | 20 +-
 .../slick/src/sphinx/code/CodeGenerator.scala | 4 +-
 .../slick/src/sphinx/code/Connection.scala | 4 +-
 .../slick/src/sphinx/code/JoinsUnions.scala | 4 +-
 .../spark/ExecutorAllocationManager.scala | 30 +-
 .../org/apache/spark/MapOutputTracker.scala | 6 +-
 .../scala/org/apache/spark/SSLOptions.scala | 4 +-
 .../org/apache/spark/SecurityManager.scala | 8 +-
 .../scala/org/apache/spark/SparkContext.scala | 8 +-
 .../scala/org/apache/spark/SparkEnv.scala | 5 +-
 .../org/apache/spark/SparkHadoopWriter.scala | 12 +-
 .../spark/api/java/JavaSparkContext.scala | 7 +-
 .../apache/spark/api/python/PythonRDD.scala | 3 +-
 .../spark/deploy/ExecutorDescription.scala | 6 +-
 .../spark/deploy/FaultToleranceTest.scala | 14 +-
 .../apache/spark/deploy/SparkHadoopUtil.scala | 8 +-
 .../deploy/history/ApplicationCache.scala | 5 +-
 .../deploy/master/ApplicationSource.scala | 5 +-
 .../apache/spark/deploy/master/Master.scala | 4 +-
 .../spark/deploy/mesos/ui/DriverPage.scala | 5 +-
 .../spark/deploy/worker/DriverWrapper.scala | 4 +-
 .../org/apache/spark/executor/Executor.scala | 27 +-
 .../spark/executor/ExecutorSource.scala | 5 +-
 .../spark/memory/UnifiedMemoryManager.scala | 12 +-
 .../spark/partial/GroupedMeanEvaluator.scala | 4 +-
 .../org/apache/spark/rdd/CoalescedRDD.scala | 3 +-
 .../apache/spark/rdd/DoubleRDDFunctions.scala | 12 +-
 .../org/apache/spark/rdd/HadoopRDD.scala | 5 +-
 .../org/apache/spark/rdd/NewHadoopRDD.scala | 4 +-
 .../apache/spark/rdd/PairRDDFunctions.scala | 24 +-
 .../main/scala/org/apache/spark/rdd/RDD.scala | 36 +-
 .../apache/spark/scheduler/DAGScheduler.scala | 9 +-
 .../spark/scheduler/DAGSchedulerSource.scala | 53 +-
 .../spark/scheduler/TaskSchedulerImpl.scala | 16 +-
 .../spark/scheduler/TaskSetManager.scala | 10 +-
 .../CoarseGrainedSchedulerBackend.scala | 14 +-
 .../cluster/mesos/MesosClusterScheduler.scala | 6 +-
 .../mesos/MesosClusterSchedulerSource.scala | 18 +-
 .../spark/serializer/KryoSerializer.scala | 4 +-
 .../shuffle/sort/SortShuffleWriter.scala | 6 +-
 .../storage/BlockManagerMasterEndpoint.scala | 6 +-
 .../spark/storage/BlockManagerSource.scala | 22 +-
 .../storage/ShuffleBlockFetcherIterator.scala | 70 +-
 .../spark/storage/memory/MemoryStore.scala | 10 +-
 .../org/apache/spark/ui/JettyUtils.scala | 8 +-
 .../org/apache/spark/ui/jobs/JobPage.scala | 5 +-
 .../spark/ui/jobs/JobProgressListener.scala | 17 +-
 .../org/apache/spark/ui/jobs/PoolPage.scala | 5 +-
 .../apache/spark/util/CollectionsUtils.scala | 5 +-
 .../org/apache/spark/util/SizeEstimator.scala | 10 +-
 .../org/apache/spark/util/StatCounter.scala | 7 +-
 .../spark/util/TimeStampedHashMap.scala | 5 +-
 .../scala/org/apache/spark/util/Utils.scala | 6 +-
 .../collection/ExternalAppendOnlyMap.scala | 14 +-
 .../spark/util/collection/Spillable.scala | 6 +-
 .../util/random/StratifiedSamplingUtils.scala | 12 +-
 .../org/apache/spark/AccumulatorSuite.scala | 4 +-
 .../scala/org/apache/spark/DriverSuite.scala | 4 +-
 .../scala/org/apache/spark/FileSuite.scala | 9 +-
 .../apache/spark/MapOutputTrackerSuite.scala | 10 +-
 .../spark/deploy/RPackageUtilsSuite.scala | 6 +-
 .../spark/deploy/SparkSubmitSuite.scala | 4 +-
 .../spark/deploy/SparkSubmitUtilsSuite.scala | 43 +-
 .../StandaloneDynamicAllocationSuite.scala | 7 +-
 .../spark/deploy/client/AppClientSuite.scala | 7 +-
 .../history/ApplicationCacheSuite.scala | 8 +-
 .../master/PersistenceEngineSuite.scala | 7 +-
 .../deploy/rest/SubmitRestProtocolSuite.scala | 15 +-
 .../deploy/worker/CommandUtilsSuite.scala | 4 +-
 .../deploy/worker/ExecutorRunnerTest.scala | 4 +-
 .../spark/deploy/worker/WorkerSuite.scala | 28 +-
 .../deploy/worker/WorkerWatcherSuite.scala | 14 +-
 .../spark/io/CompressionCodecSuite.scala | 10 +-
 .../spark/rdd/PairRDDFunctionsSuite.scala | 42 +-
 .../spark/rdd/RDDOperationScopeSuite.scala | 74 ++-
 .../org/apache/spark/rpc/RpcEnvSuite.scala | 15 +-
 .../scheduler/EventLoggingListenerSuite.scala | 10 +-
 .../scheduler/TaskResultGetterSuite.scala | 6 +-
 .../CoarseMesosSchedulerBackendSuite.scala | 7 +-
 .../mesos/MesosSchedulerUtilsSuite.scala | 33 +-
 .../api/v1/AllStagesResourceSuite.scala | 6 +-
 .../spark/storage/BlockInfoManagerSuite.scala | 5 +-
 .../BlockManagerReplicationSuite.scala | 5 +-
 .../spark/storage/BlockManagerSuite.scala | 15 +-
 .../apache/spark/storage/StorageSuite.scala | 12 +-
 .../ExternalAppendOnlyMapSuite.scala | 42 +-
 .../util/collection/ExternalSorterSuite.scala | 22 +-
 .../spark/util/collection/SorterSuite.scala | 12 +-
 .../spark/examples/CassandraCQLTest.scala | 8 +-
 .../apache/spark/examples/CassandraTest.scala | 18 +-
 .../apache/spark/examples/ml/GBTExample.scala | 20 +-
 .../ml/LogisticRegressionExample.scala | 10 +-
 .../examples/ml/RandomForestExample.scala | 20 +-
 .../examples/ml/SimpleParamsExample.scala | 6 +-
 .../examples/ml/VectorSlicerExample.scala | 5 +-
 .../BinaryClassificationMetricsExample.scala | 5 +-
 .../DecisionTreeClassificationExample.scala | 5 +-
 ...radientBoostingClassificationExample.scala | 5 +-
 .../GradientBoostingRegressionExample.scala | 5 +-
 .../LinearRegressionWithSGDExample.scala | 5 +-
 .../LogisticRegressionWithLBFGSExample.scala | 5 +-
 .../RandomForestClassificationExample.scala | 5 +-
 .../examples/mllib/SparseNaiveBayes.scala | 6 +-
 .../sql/jdbc/MySQLIntegrationSuite.scala | 12 +-
 .../streaming/kafka/KafkaInputDStream.scala | 5 +-
 .../kafka/ReliableKafkaReceiver.scala | 5 +-
 .../streaming/kinesis/KinesisReceiver.scala | 5 +-
 .../kinesis/KinesisRecordProcessor.scala | 3 +-
 .../streaming/kinesis/KinesisTestUtils.scala | 5 +-
 .../kinesis/KPLBasedKinesisTestUtils.scala | 5 +-
 .../kinesis/KinesisBackedBlockRDDSuite.scala | 5 +-
 .../kinesis/KinesisReceiverSuite.scala | 12 +-
 .../spark/graphx/impl/EdgePartition.scala | 16 +-
 .../apache/spark/graphx/impl/GraphImpl.scala | 15 +-
 .../org/apache/spark/graphx/GraphSuite.scala | 256 ++++----
 .../graphx/util/GraphGeneratorsSuite.scala | 31 +-
 .../scala/org/apache/spark/ml/Pipeline.scala | 12 +-
 .../spark/ml/attribute/AttributeGroup.scala | 4 +-
 .../spark/ml/classification/Classifier.scala | 4 +-
 .../classification/LogisticRegression.scala | 4 +-
 .../ProbabilisticClassifier.scala | 4 +-
 .../org/apache/spark/ml/clustering/LDA.scala | 6 +-
 .../apache/spark/ml/feature/Word2Vec.scala | 5 +-
 .../ml/regression/DecisionTreeRegressor.scala | 4 +-
 .../ml/tree/impl/GradientBoostedTrees.scala | 12 +-
 .../spark/ml/tree/impl/RandomForest.scala | 16 +-
 .../org/apache/spark/ml/tree/treeParams.scala | 6 +-
 .../spark/ml/tuning/CrossValidator.scala | 7 +-
 .../mllib/api/python/PythonMLLibAPI.scala | 16 +-
 .../spark/mllib/clustering/LDAModel.scala | 7 +-
 .../apache/spark/mllib/feature/Word2Vec.scala | 14 +-
 .../apache/spark/mllib/linalg/Matrices.scala | 9 +-
 .../linalg/distributed/BlockMatrix.scala | 10 +-
 .../mllib/linalg/distributed/RowMatrix.scala | 7 +-
 .../mllib/regression/IsotonicRegression.scala | 7 +-
 .../apache/spark/mllib/regression/Lasso.scala | 13 +-
 .../mllib/regression/LinearRegression.scala | 13 +-
 .../mllib/regression/RidgeRegression.scala | 13 +-
 .../mllib/tree/GradientBoostedTrees.scala | 12 +-
 .../spark/mllib/tree/RandomForest.scala | 17 +-
 .../mllib/tree/impl/DTStatsAggregator.scala | 4 +-
 .../mllib/tree/model/treeEnsembleModels.scala | 7 +-
 .../spark/ml/feature/Word2VecSuite.scala | 5 +-
 ...erativelyReweightedLeastSquaresSuite.scala | 15 +-
 .../ml/optim/WeightedLeastSquaresSuite.scala | 10 +-
 .../GeneralizedLinearRegressionSuite.scala | 41 +-
 .../ml/tuning/ParamGridBuilderSuite.scala | 6 +-
 .../classification/NaiveBayesSuite.scala | 28 +-
 .../StreamingLogisticRegressionSuite.scala | 24 +-
 .../spark/mllib/linalg/MatricesSuite.scala | 26 +-
 .../spark/mllib/linalg/VectorsSuite.scala | 5 +-
 .../spark/mllib/random/RandomRDDsSuite.scala | 32 +-
 .../regression/LinearRegressionSuite.scala | 12 +-
 .../regression/RidgeRegressionSuite.scala | 7 +-
 .../StreamingLinearRegressionSuite.scala | 24 +-
 .../spark/mllib/tree/DecisionTreeSuite.scala | 20 +-
 .../spark/mllib/tree/RandomForestSuite.scala | 12 +-
 .../mllib/tree/impl/BaggedPointSuite.scala | 14 +-
 .../apache/spark/repl/SparkILoopInit.scala | 5 +-
 .../org/apache/spark/repl/SparkIMain.scala | 6 +-
 .../spark/repl/SparkMemberHandlers.scala | 5 +-
 .../org/apache/spark/repl/SparkILoop.scala | 5 +-
 .../spark/repl/ExecutorClassLoader.scala | 5 +-
 .../scala/org/apache/spark/sql/Encoder.scala | 6 +-
 .../spark/sql/catalyst/ScalaReflection.scala | 10 +-
 .../catalyst/analysis/DecimalPrecision.scala | 8 +-
 .../sql/catalyst/catalog/SessionCatalog.scala | 12 +-
 .../expressions/complexTypeCreator.scala | 4 +-
 .../expressions/complexTypeExtractors.scala | 9 +-
 .../expressions/datetimeExpressions.scala | 4 +-
 .../spark/sql/catalyst/expressions/misc.scala | 12 +-
 .../spark/sql/catalyst/plans/QueryPlan.scala | 5 +-
 .../spark/sql/catalyst/trees/TreeNode.scala | 10 +-
 .../sql/catalyst/analysis/AnalysisTest.scala | 8 +-
 .../catalyst/catalog/CatalogTestCases.scala | 23 +-
 .../catalog/SessionCatalogSuite.scala | 5 +-
 .../encoders/EncoderResolutionSuite.scala | 5 +-
 .../codegen/GeneratedProjectionSuite.scala | 8 +-
 .../apache/spark/sql/DataFrameReader.scala | 8 +-
 .../scala/org/apache/spark/sql/Dataset.scala | 19 +-
 .../sql/execution/CoGroupedIterator.scala | 5 +-
 .../spark/sql/execution/SparkPlan.scala | 5 +-
 .../spark/sql/execution/SparkStrategies.scala | 8 +-
 .../apache/spark/sql/execution/Window.scala | 9 +-
 .../aggregate/AggregationIterator.scala | 5 +-
 .../aggregate/TungstenAggregate.scala | 5 +-
 .../TungstenAggregationIterator.scala | 4 +-
 .../columnar/InMemoryColumnarTableScan.scala | 5 +-
 .../datasources/DataSourceStrategy.scala | 5 +-
 .../InsertIntoHadoopFsRelation.scala | 4 +-
 .../datasources/WriterContainer.scala | 11 +-
 .../datasources/jdbc/DefaultSource.scala | 5 +-
 .../datasources/json/JSONOptions.scala | 4 +-
 .../parquet/CatalystReadSupport.scala | 5 +-
 .../DirectParquetOutputCommitter.scala | 9 +-
 .../datasources/parquet/ParquetRelation.scala | 9 +-
 .../execution/exchange/ShuffleExchange.scala | 5 +-
 .../execution/joins/CartesianProduct.scala | 6 +-
 .../sql/execution/joins/HashedRelation.scala | 5 +-
 .../apache/spark/sql/sources/interfaces.scala | 17 +-
 .../spark/sql/ColumnExpressionSuite.scala | 10 +-
 .../spark/sql/DataFrameAggregateSuite.scala | 7 +-
 .../apache/spark/sql/DateFunctionsSuite.scala | 12 +-
 .../scala/org/apache/spark/sql/UDFSuite.scala | 8 +-
 .../parquet/ParquetCompatibilityTest.scala | 6 +-
 .../datasources/parquet/ParquetTest.scala | 10 +-
 .../sql/internal/SQLConfEntrySuite.scala | 6 +-
 .../org/apache/spark/sql/jdbc/JDBCSuite.scala | 5 +-
 .../spark/sql/sources/FilteredScanSuite.scala | 4 +-
 .../spark/sql/sources/InsertSuite.scala | 10 +-
 .../spark/sql/sources/PrunedScanSuite.scala | 4 +-
 .../spark/sql/sources/SaveLoadSuite.scala | 16 +-
 .../ContinuousQueryManagerSuite.scala | 5 +-
 .../hive/thriftserver/HiveThriftServer2.scala | 4 +-
 .../sql/hive/thriftserver/SparkSQLEnv.scala | 4 +-
 .../spark/sql/hive/HiveInspectors.scala | 10 +-
 .../org/apache/spark/sql/hive/HiveQl.scala | 5 +-
 .../apache/spark/sql/hive/TableReader.scala | 5 +-
 .../sql/hive/execution/HiveTableScan.scala | 5 +-
 .../spark/sql/hive/hiveWriterContainers.scala | 15 +-
 .../spark/sql/hive/HiveInspectorSuite.scala | 5 +-
 .../sql/hive/MetastoreDataSourcesSuite.scala | 4 +-
 .../execution/AggregationQuerySuite.scala | 11 +-
 .../spark/sql/hive/orc/OrcFilterSuite.scala | 5 +-
 .../apache/spark/sql/hive/parquetSuites.scala | 5 +-
 .../sql/sources/hadoopFsRelationSuites.scala | 8 +-
 .../apache/spark/streaming/Checkpoint.scala | 4 +-
 .../spark/streaming/StreamingContext.scala | 30 +-
 .../spark/streaming/StreamingSource.scala | 9 +-
 .../streaming/api/java/JavaDStreamLike.scala | 6 +-
 .../streaming/api/java/JavaPairDStream.scala | 14 +-
 .../streaming/api/python/PythonDStream.scala | 4 +-
 .../streaming/dstream/FileInputDStream.scala | 4 +-
 .../spark/streaming/rdd/MapWithStateRDD.scala | 10 +-
 .../receiver/ReceivedBlockHandler.scala | 6 +-
 .../streaming/scheduler/JobGenerator.scala | 4 +-
 .../streaming/scheduler/JobScheduler.scala | 5 +-
 .../scheduler/ReceivedBlockTracker.scala | 4 +-
 .../streaming/scheduler/ReceiverTracker.scala | 13 +-
 .../spark/streaming/ui/StreamingPage.scala | 6 +-
 .../apache/spark/streaming/FailureSuite.scala | 5 +-
 .../streaming/ReceivedBlockTrackerSuite.scala | 27 +-
 .../streaming/ReceiverInputDStreamSuite.scala | 10 +-
 .../streaming/rdd/MapWithStateRDDSuite.scala | 5 +-
 .../scheduler/ReceiverTrackerSuite.scala | 4 +-
 .../StreamingJobProgressListenerSuite.scala | 4 +-
 .../yarn/AMDelegationTokenRenewer.scala | 10 +-
 .../org/apache/spark/deploy/yarn/Client.scala | 25 +-
 .../spark/deploy/yarn/ClientArguments.scala | 5 +-
 .../yarn/ExecutorDelegationTokenUpdater.scala | 15 +-
 .../spark/deploy/yarn/YarnAllocator.scala | 5 +-
 .../spark/deploy/yarn/YarnRMClient.scala | 5 +-
 .../deploy/yarn/YarnSparkHadoopUtil.scala | 9 +-
 .../cluster/YarnSchedulerBackend.scala | 5 +-
 .../deploy/yarn/BaseYarnClusterSuite.scala | 4 +-
 .../deploy/yarn/YarnAllocatorSuite.scala | 12 +-
 .../spark/deploy/yarn/YarnClusterSuite.scala | 5 +-
 .../spark/launcher/TestClasspathBuilder.scala | 4 +-
 .../yarn/YarnShuffleServiceSuite.scala | 6 +-
 .../src/main/scala/spire/math/poly/Term.scala | 4 +-
 .../scala/spire/example/randomforest.scala | 4 +-
 .../src/test/scala/spire/SyntaxTest.scala | 3 +-
 .../interval/IntervalSeqArbitrary.scala | 4 +-
 .../interval/IntervalTrieArbitrary.scala | 4 +-
 .../twitter/summingbird/batch/BatchLaws.scala | 4 +-
 .../summingbird/graph/LiteralTests.scala | 4 +-
 .../summingbird/example/Serialization.scala | 5 +-
 .../online/executor/AsyncBase.scala | 4 +-
 .../online/executor/InputState.scala | 5 +-
 .../scalding/BatchedStoreProperties.scala | 4 +-
 .../summingbird/scalding/Service.scala | 5 +-
 .../scalding/store/InitialBatchedStore.scala | 5 +-
 .../twitter/summingbird/storm/BaseBolt.scala | 4 +-
 .../summingbird/storm/StormStatProvider.scala | 4 +-
 .../scala/com/twitter/util/Disposable.scala | 5 +-
 .../main/scala/com/twitter/util/Timer.scala | 5 +-
 .../scala/com/twitter/jvm/Estimator.scala | 6 +-
 .../src/main/scala/com/twitter/jvm/Jvm.scala | 6 +-
 .../scala/com/twitter/logging/Formatter.scala | 5 +-
 .../scala/com/twitter/logging/Handler.scala | 5 +-
 .../com/twitter/logging/SyslogHandler.scala | 7 +-
 .../twitter/util/registry/FormatterTest.scala | 5 +-
 .../finagle/stats/StatsReceiverTest.scala | 7 +-
 .../main/scala/com/twitter/zk/ServerSet.scala | 3 +-
 .../src/main/scala/com/twitter/zk/ZNode.scala | 5 +-
 1327 files changed, 7500 insertions(+), 5386 deletions(-)

diff --git a/repos/PredictionIO/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala b/repos/PredictionIO/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
index 797dd51417c..2312b51f237 100644
--- a/repos/PredictionIO/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
+++ b/repos/PredictionIO/core/src/main/scala/io/prediction/core/BaseAlgorithm.scala
@@ -120,8 +120,9 @@ abstract class BaseAlgorithm[PD, M, Q, P]
     * @return Type signature of query
     */
   def queryClass: Class[Q] = {
-    val types = TypeResolver
-      .resolveRawArguments(classOf[BaseAlgorithm[PD, M, Q, P]], getClass)
+    val types = TypeResolver.resolveRawArguments(
+      classOf[BaseAlgorithm[PD, M, Q, P]],
+      getClass)
     types(2).asInstanceOf[Class[Q]]
   }
 }
diff --git a/repos/PredictionIO/core/src/main/scala/io/prediction/workflow/CreateServer.scala b/repos/PredictionIO/core/src/main/scala/io/prediction/workflow/CreateServer.scala
index 82e280ac68c..ed2256bf5ca 100644
--- a/repos/PredictionIO/core/src/main/scala/io/prediction/workflow/CreateServer.scala
+++ b/repos/PredictionIO/core/src/main/scala/io/prediction/workflow/CreateServer.scala
@@ -447,8 +447,9 @@ class ServerActor[Q, P](val args: ServerConfig,
   def actorRefFactory: ActorContext = context

   implicit val timeout = Timeout(5, TimeUnit.SECONDS)
-  val pluginsActorRef = context
-    .actorOf(Props(classOf[PluginsActor], args.engineVariant), "PluginsActor")
+  val pluginsActorRef = context.actorOf(
+    Props(classOf[PluginsActor], args.engineVariant),
+    "PluginsActor")
   val pluginContext = EngineServerPluginContext(log, args.engineVariant)

   def receive: Actor.Receive = runRoute(myRoute)
diff --git a/repos/PredictionIO/core/src/main/scala/io/prediction/workflow/JsonExtractor.scala b/repos/PredictionIO/core/src/main/scala/io/prediction/workflow/JsonExtractor.scala
index 248f13a1e54..e459292be54 100644
--- a/repos/PredictionIO/core/src/main/scala/io/prediction/workflow/JsonExtractor.scala
+++ b/repos/PredictionIO/core/src/main/scala/io/prediction/workflow/JsonExtractor.scala
@@ -77,8 +77,9 @@ object JsonExtractor {
   def paramToJson(extractorOption: JsonExtractorOption,
                   param: (String, Params)): String = {
     // to be replaced JValue needs to be done by Json4s, otherwise the tuple JValue will be wrong
-    val toBeReplacedJValue = JsonExtractor
-      .toJValue(JsonExtractorOption.Json4sNative, (param._1, null))
+    val toBeReplacedJValue = JsonExtractor.toJValue(
+      JsonExtractorOption.Json4sNative,
+      (param._1, null))
     val paramJValue = JsonExtractor.toJValue(extractorOption, param._2)

     compact(render(toBeReplacedJValue.replace(param._1 :: Nil, paramJValue)))
@@ -134,8 +135,9 @@ object JsonExtractor {
     val jValues = params.map {
       case (name, param) =>
         // to be replaced JValue needs to be done by Json4s, otherwise the tuple JValue will be wrong
-        val toBeReplacedJValue = JsonExtractor
-          .toJValue(JsonExtractorOption.Json4sNative, (name, null))
+        val toBeReplacedJValue =
+          JsonExtractor.toJValue(JsonExtractorOption.Json4sNative,
+                                 (name, null))
         val paramJValue = JsonExtractor.toJValue(extractorOption, param)

         toBeReplacedJValue.replace(name :: Nil, paramJValue)
diff --git a/repos/PredictionIO/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala b/repos/PredictionIO/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala
index b5f9c86f055..8e45e79f9ec 100644
--- a/repos/PredictionIO/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala
+++ b/repos/PredictionIO/core/src/test/scala/io/prediction/controller/MetricEvaluatorTest.scala
@@ -41,7 +41,9 @@ class MetricEvaluatorDevSuite extends FunSuite with SharedSparkContext {
       (EngineParams(),
        Seq((EmptyParams(), sc.parallelize(Seq((1, 0, 0), (2, 0, 0)))))))

-    val r = metricEvaluator
-      .evaluateBase(sc, Evaluation0, engineEvalDataSet, WorkflowParams())
+    val r = metricEvaluator.evaluateBase(sc,
+                                         Evaluation0,
+                                         engineEvalDataSet,
+                                         WorkflowParams())
   }
 }
diff --git a/repos/PredictionIO/core/src/test/scala/io/prediction/workflow/JsonExtractorSuite.scala b/repos/PredictionIO/core/src/test/scala/io/prediction/workflow/JsonExtractorSuite.scala
index 5d72a8db2a5..1e0acce7217 100644
--- a/repos/PredictionIO/core/src/test/scala/io/prediction/workflow/JsonExtractorSuite.scala
+++ b/repos/PredictionIO/core/src/test/scala/io/prediction/workflow/JsonExtractorSuite.scala
@@ -36,8 +36,9 @@ class JsonExtractorSuite extends FunSuite with Matchers {
     val json =
       """{"string": "query string", "optional": "optional string", "default": "d"}"""

-    val query = JsonExtractor
-      .extract(JsonExtractorOption.Json4sNative, json, classOf[ScalaQuery])
+    val query = JsonExtractor.extract(JsonExtractorOption.Json4sNative,
+                                      json,
+                                      classOf[ScalaQuery])

     query should be(ScalaQuery("query string", Some("optional string"), "d"))
   }
@@ -48,8 +49,9 @@ class JsonExtractorSuite extends FunSuite with Matchers {
     val json = """{"string": "query string"}"""

-    val query = JsonExtractor
-      .extract(JsonExtractorOption.Json4sNative, json, classOf[ScalaQuery])
+    val query = JsonExtractor.extract(JsonExtractorOption.Json4sNative,
+                                      json,
+                                      classOf[ScalaQuery])

     query should be(ScalaQuery("query string", None, "default"))
   }
@@ -61,8 +63,9 @@ class JsonExtractorSuite extends FunSuite with Matchers {
     val json =
       """{"string": "query string", "optional": null, "default": null}"""

-    val query = JsonExtractor
-      .extract(JsonExtractorOption.Json4sNative, json, classOf[ScalaQuery])
+    val query = JsonExtractor.extract(JsonExtractorOption.Json4sNative,
+                                      json,
+                                      classOf[ScalaQuery])

     query should be(ScalaQuery("query string", None, "default"))
   }
@@ -73,8 +76,9 @@ class JsonExtractorSuite extends FunSuite with Matchers {
    val json =
      """{"string": "query string", "optional": "optional string", "default": "d"}"""

-    val query = JsonExtractor
-      .extract(JsonExtractorOption.Json4sNative, json, classOf[ScalaQuery])
+    val query = JsonExtractor.extract(JsonExtractorOption.Json4sNative,
+                                      json,
+                                      classOf[ScalaQuery])

     query should be(ScalaQuery("query string", Some("optional string"), "d"))
   }
@@ -85,8 +89,9 @@ class JsonExtractorSuite extends FunSuite with Matchers {
     val json = """{"string": "query string"}"""

-    val query = JsonExtractor
-      .extract(JsonExtractorOption.Json4sNative, json, classOf[ScalaQuery])
+    val query = JsonExtractor.extract(JsonExtractorOption.Json4sNative,
+                                      json,
+                                      classOf[ScalaQuery])

     query should be(ScalaQuery("query string", None, "default"))
   }
@@ -97,8 +102,9 @@ class JsonExtractorSuite extends FunSuite with Matchers {
     val json =
       """{"string": "query string", "optional": null, "default": null}"""

-    val query = JsonExtractor
-      .extract(JsonExtractorOption.Json4sNative, json, classOf[ScalaQuery])
+    val query = JsonExtractor.extract(JsonExtractorOption.Json4sNative,
+                                      json,
+                                      classOf[ScalaQuery])

     query should be(ScalaQuery("query string", None, "default"))
   }
@@ -108,8 +114,9 @@
           " value") {
     val json = """{"string": "query string"}"""

-    val query = JsonExtractor
-      .extract(JsonExtractorOption.Gson, json, classOf[ScalaQuery])
+    val query = JsonExtractor.extract(JsonExtractorOption.Gson,
+                                      json,
+                                      classOf[ScalaQuery])

     query should be(ScalaQuery("query string", null, null))
   }
diff --git a/repos/PredictionIO/data/src/main/scala/io/prediction/data/storage/Storage.scala b/repos/PredictionIO/data/src/main/scala/io/prediction/data/storage/Storage.scala
index 0a114211769..78f96d70f1a 100644
--- a/repos/PredictionIO/data/src/main/scala/io/prediction/data/storage/Storage.scala
+++ b/repos/PredictionIO/data/src/main/scala/io/prediction/data/storage/Storage.scala
@@ -357,8 +357,9 @@ object Storage extends Logging {
     info("Test writing to Event Store (App Id 0)...")
     // use appId=0 for testing purpose
     eventsDb.init(0)
-    eventsDb
-      .insert(Event(event = "test", entityType = "test", entityId = "test"), 0)
+    eventsDb.insert(
+      Event(event = "test", entityType = "test", entityId = "test"),
+      0)
     eventsDb.remove(0)
     eventsDb.close()
   }
diff --git a/repos/PredictionIO/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala b/repos/PredictionIO/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
index b8d049ebbf3..a78cb53ef3e 100644
--- a/repos/PredictionIO/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
+++ b/repos/PredictionIO/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/DataSource.scala
@@ -66,8 +66,10 @@ class ForestFireSamplingDataSource(val dsp: FFSamplingDSParams)
   override def readTraining(sc: SparkContext): TrainingData = {
     val g = GraphLoader.edgeListFile(sc, dsp.graphEdgelistPath)

-    val sampled = Sampling
-      .forestFireSamplingInduced(sc, g, dsp.sampleFraction, dsp.geoParam)
+    val sampled = Sampling.forestFireSamplingInduced(sc,
+                                                     g,
+                                                     dsp.sampleFraction,
+                                                     dsp.geoParam)
     val identity = DeltaSimRankRDD.identityMatrix(sc, g.vertices.count())

     new TrainingData(sampled, identity)
diff --git a/repos/PredictionIO/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/SimRankAlgorithm.scala b/repos/PredictionIO/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/SimRankAlgorithm.scala
index 3ca14834d64..1bf2e2156e1 100644
--- a/repos/PredictionIO/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/SimRankAlgorithm.scala
+++ b/repos/PredictionIO/examples/experimental/scala-parallel-friend-recommendation/src/main/scala/SimRankAlgorithm.scala
@@ -15,8 +15,10 @@ class SimRankAlgorithm(val ap: SimRankParams)
   def train(td: TrainingData): RDD[(Long, Double)] = {
     td.g.edges.count()

-    val scores = DeltaSimRankRDD
-      .compute(td.g, ap.numIterations, td.identityMatrix, ap.decay)
+    val scores = DeltaSimRankRDD.compute(td.g,
+                                         ap.numIterations,
+                                         td.identityMatrix,
+                                         ap.decay)

     scores
   }
diff --git a/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/RunServer.scala b/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/RunServer.scala
index 3f7aae87eaf..9d628734135 100644
--- a/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/RunServer.scala
+++ b/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/RunServer.scala
@@ -176,7 +176,9 @@ object RunServer extends Logging {
         .getOrElse(Nil) ++
       Seq("--json-extractor", ca.common.jsonExtractor.toString)

-    Runner
-      .runOnSpark("io.prediction.workflow.CreateServer", args, ca, jarFiles)
+    Runner.runOnSpark("io.prediction.workflow.CreateServer",
+                      args,
+                      ca,
+                      jarFiles)
   }
 }
diff --git a/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala b/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
index 4dc27888432..958762977ac 100644
--- a/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
+++ b/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/RunWorkflow.scala
@@ -210,7 +210,9 @@ object RunWorkflow extends Logging {
         else Seq()) ++
       Seq("--json-extractor", ca.common.jsonExtractor.toString)

-    Runner
-      .runOnSpark("io.prediction.workflow.CreateWorkflow", args, ca, jarFiles)
+    Runner.runOnSpark("io.prediction.workflow.CreateWorkflow",
+                      args,
+                      ca,
+                      jarFiles)
   }
 }
diff --git a/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/console/Console.scala b/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/console/Console.scala
index ad221b43760..701d0c4619e 100644
--- a/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/console/Console.scala
+++ b/repos/PredictionIO/tools/src/main/scala/io/prediction/tools/console/Console.scala
@@ -872,8 +872,9 @@ object Console extends Logging {
       info(s"Uber JAR enabled. Putting ${core.getName} in lib.")
       val dst = new File("lib")
       dst.mkdir()
-      FileUtils
-        .copyFileToDirectory(coreAssembly(ca.common.pioHome.get), dst, true)
+      FileUtils.copyFileToDirectory(coreAssembly(ca.common.pioHome.get),
+                                    dst,
+                                    true)
     } else {
       if (new File("engine.json").exists()) {
         info(s"Uber JAR disabled. Making sure lib/${core.getName} is absent.")
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorCreationPerfSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorCreationPerfSpec.scala
index f363ec5b732..d520be98103 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorCreationPerfSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorCreationPerfSpec.scala
@@ -50,8 +50,8 @@ object ActorCreationPerfSpec {
       sender() ! Created
     case WaitForChildren ⇒
       context.children.foreach(_ ! IsAlive)
-      context
-        .become(waiting(context.children.size, sender()), discardOld = false)
+      context.become(waiting(context.children.size, sender()),
+                     discardOld = false)
   }

   def waiting(number: Int, replyTo: ActorRef): Receive = {
@@ -80,8 +80,8 @@ object ActorCreationPerfSpec {
       sender() ! Created
     case WaitForChildren ⇒
       context.children.foreach(_ ! IsAlive)
-      context
-        .become(waiting(context.children.size, sender()), discardOld = false)
+      context.become(waiting(context.children.size, sender()),
+                     discardOld = false)
   }

   def waiting(number: Int, replyTo: ActorRef): Receive = {
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala
index 8410a1bd5ea..50efd9d69a1 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala
@@ -87,8 +87,7 @@ class ActorDSLSpec extends AkkaSpec {
       try {
         for (_ ← 1 to 1000) i.receiver ! 0
         expectNoMsg(1 second)
-        EventFilter
-          .warning(start = "dropping message", occurrences = 1) intercept {
+        EventFilter.warning(start = "dropping message", occurrences = 1) intercept {
           i.receiver ! 42
         }
         expectMsgType[Warning]
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala
index 939d1c2f1d3..64f0c3b1955 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala
@@ -175,8 +175,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
     "find actors by looking up their path" in {
       def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) {
-        Await
-          .result(looker ? LookupPath(pathOf.path), timeout.duration) should ===(
+        Await.result(looker ? LookupPath(pathOf.path), timeout.duration) should ===(
           result)
       }
       for {
@@ -238,8 +237,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
     "find system-generated actors" in {
       def check(target: ActorRef) {
         for (looker ← all) {
-          Await
-            .result(looker ? LookupPath(target.path), timeout.duration) should ===(
+          Await.result(looker ? LookupPath(target.path), timeout.duration) should ===(
             target)
           Await.result(looker ? LookupString(target.path.toString),
                        timeout.duration) should ===(target)
@@ -288,15 +286,13 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
       val a = expectMsgType[ActorRef]
       a.path.elements.head should ===("temp")
       Await.result(c2 ? LookupPath(a.path), timeout.duration) should ===(a)
-      Await
-        .result(c2 ? LookupString(a.path.toString), timeout.duration) should ===(
+      Await.result(c2 ? LookupString(a.path.toString), timeout.duration) should ===(
         a)
       Await.result(c2 ? LookupString(a.path.toStringWithoutAddress),
                    timeout.duration) should ===(a)
       Await.result(c2 ? LookupString("../../" + a.path.elements.mkString("/")),
                    timeout.duration) should ===(a)
-      Await
-        .result(c2 ? LookupString(a.path.toString + "/"), timeout.duration) should ===(
+      Await.result(c2 ? LookupString(a.path.toString + "/"), timeout.duration) should ===(
         a)
       Await.result(c2 ? LookupString(a.path.toStringWithoutAddress + "/"),
                    timeout.duration) should ===(a)
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala
index 70399fc2b51..b9bb1a51b2d 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala
@@ -70,10 +70,11 @@ class ActorSelectionSpec
       asked.correlationId should ===(selection)

       implicit val ec = system.dispatcher
-      val resolved = Await
-        .result(selection.resolveOne(timeout.duration).mapTo[ActorRef] recover {
+      val resolved = Await.result(
+        selection.resolveOne(timeout.duration).mapTo[ActorRef] recover {
           case _ ⇒ null
-        }, timeout.duration)
+        },
+        timeout.duration)
       Option(resolved) should ===(result)

       result
@@ -323,8 +324,7 @@ class ActorSelectionSpec
     "resolve one actor with explicit timeout" in {
       val s = system.actorSelection(system / "c2") // Java and Scala API
-      Await
-        .result(s.resolveOne(1.second.dilated), timeout.duration) should ===(
+      Await.result(s.resolveOne(1.second.dilated), timeout.duration) should ===(
         c2)
     }
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala
index 31359240647..57888f440bb 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala
@@ -121,8 +121,9 @@ object ActorSystemSpec {
       mbox: Mailbox,
       hasMessageHint: Boolean,
       hasSystemMessageHint: Boolean): Boolean = {
-    val ret = super
-      .registerForExecution(mbox, hasMessageHint, hasSystemMessageHint)
+    val ret = super.registerForExecution(mbox,
+                                         hasMessageHint,
+                                         hasSystemMessageHint)
     doneIt.switchOn {
       TestKit.awaitCond(mbox.actor.actor != null, 1.second)
       mbox.actor.actor match {
@@ -305,8 +306,7 @@ class ActorSystemSpec
     implicit val timeout = Timeout((20 seconds).dilated)
     val waves = for (i ← 1 to 3)
       yield system.actorOf(Props[ActorSystemSpec.Waves]) ? 50000
-    Await
-      .result(Future.sequence(waves), timeout.duration + 5.seconds) should ===(
+    Await.result(Future.sequence(waves), timeout.duration + 5.seconds) should ===(
       Vector("done", "done", "done"))
   }
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ConsistencySpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ConsistencySpec.scala
index 84713b9d04a..59b101cc412 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ConsistencySpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/ConsistencySpec.scala
@@ -11,8 +11,7 @@ object ConsistencySpec {
   val maxThreads = 2000
   val factor = 1.5d
   val threads =
-    ThreadPoolConfig
-      .scaledPoolSize(minThreads, factor, maxThreads) // Make sure we have more threads than cores
+    ThreadPoolConfig.scaledPoolSize(minThreads, factor, maxThreads) // Make sure we have more threads than cores

   val config = s"""
       consistency-dispatcher {
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/DeadLetterSupressionSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/DeadLetterSupressionSpec.scala
index 9312e0b8dce..b6b4fc0733e 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/DeadLetterSupressionSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/DeadLetterSupressionSpec.scala
@@ -32,8 +32,8 @@ class DeadLetterSupressionSpec extends AkkaSpec with ImplicitSender {
     system.eventStream.subscribe(deadListener.ref, classOf[DeadLetter])

     val suppressedListener = TestProbe()
-    system.eventStream
-      .subscribe(suppressedListener.ref, classOf[SuppressedDeadLetter])
+    system.eventStream.subscribe(suppressedListener.ref,
+                                 classOf[SuppressedDeadLetter])

     val allListener = TestProbe()
     system.eventStream.subscribe(allListener.ref, classOf[AllDeadLetters])
@@ -60,8 +60,8 @@ class DeadLetterSupressionSpec extends AkkaSpec with ImplicitSender {
     system.eventStream.subscribe(deadListener.ref, classOf[DeadLetter])

     val suppressedListener = TestProbe()
-    system.eventStream
-      .subscribe(suppressedListener.ref, classOf[SuppressedDeadLetter])
+    system.eventStream.subscribe(suppressedListener.ref,
+                                 classOf[SuppressedDeadLetter])

     val allListener = TestProbe()
     system.eventStream.subscribe(allListener.ref, classOf[AllDeadLetters])
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala
index a92cea5727d..c8b4261cd7a 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala
@@ -143,8 +143,7 @@ class FSMActorSpec
       Await.ready(transitionCallBackLatch, timeout.duration)
       Await.ready(lockedLatch, timeout.duration)

-      EventFilter
-        .warning(start = "unhandled event", occurrences = 1) intercept {
+      EventFilter.warning(start = "unhandled event", occurrences = 1) intercept {
         lock ! "not_handled"
         Await.ready(unhandledLatch, timeout.duration)
       }
@@ -172,8 +171,7 @@ class FSMActorSpec
         }
       })
       val name = fsm.path.toString
-      EventFilter
-        .error("Next state 2 does not exist", occurrences = 1) intercept {
+      EventFilter.error("Next state 2 does not exist", occurrences = 1) intercept {
         system.eventStream.subscribe(testActor, classOf[Logging.Error])
         fsm ! "go"
         expectMsgPF(1 second, hint = "Next state 2 does not exist") {
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala
index 9e36603abbc..4c83f8f9565 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala
@@ -365,8 +365,10 @@ class LightArrayRevolverSchedulerSpec
     "reject periodic tasks scheduled too far into the future" in {
       val maxDelay = tickDuration * Int.MaxValue
       import system.dispatcher
-      system.scheduler
-        .schedule(maxDelay - tickDuration, 1.second, testActor, "OK")
+      system.scheduler.schedule(maxDelay - tickDuration,
+                                1.second,
+                                testActor,
+                                "OK")
       intercept[IllegalArgumentException] {
         system.scheduler.schedule(maxDelay, 1.second, testActor, "Too far")
       }
@@ -375,8 +377,10 @@ class LightArrayRevolverSchedulerSpec
     "reject periodic tasks scheduled with too long interval" in {
       val maxDelay = tickDuration * Int.MaxValue
       import system.dispatcher
-      system.scheduler
-        .schedule(100.millis, maxDelay - tickDuration, testActor, "OK")
+      system.scheduler.schedule(100.millis,
+                                maxDelay - tickDuration,
+                                testActor,
+                                "OK")
       expectMsg("OK")
       intercept[IllegalArgumentException] {
         system.scheduler.schedule(100.millis, maxDelay, testActor, "Too long")
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala
index 26af9e3b289..bbd12b562e9 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala
@@ -211,8 +211,7 @@ class SupervisorSpec
   override def beforeEach() = {}

   def ping(pingPongActor: ActorRef) = {
-    Await
-      .result(pingPongActor.?(Ping)(DilatedTimeout), DilatedTimeout) should ===(
+    Await.result(pingPongActor.?(Ping)(DilatedTimeout), DilatedTimeout) should ===(
       PongMessage)
     expectMsg(Timeout, PingMessage)
   }
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
index 85b12c4ee92..ef274fa75ee 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
@@ -761,8 +761,7 @@ class FutureSpec
         Future(()) map { _ ⇒
           val nested = Future(())
           nested foreach (_ ⇒ l1.open())
-          FutureSpec
-            .ready(l1, TestLatch.DefaultTimeout) // make sure nested is completed
+          FutureSpec.ready(l1, TestLatch.DefaultTimeout) // make sure nested is completed
           nested foreach (_ ⇒ l2.open())
           FutureSpec.ready(l2, TestLatch.DefaultTimeout)
         }
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpConnectionSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpConnectionSpec.scala
index 0102612e823..876d688be1b 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpConnectionSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpConnectionSpec.scala
@@ -101,8 +101,7 @@ class TcpConnectionSpec extends AkkaSpec("""
       createConnectionActor(options = Vector(SO.KeepAlive(false)))
       val clientChannel = connectionActor.underlyingActor.channel
       clientChannel.socket.getKeepAlive should ===(true) // only set after connection is established
-      EventFilter
-        .warning(pattern = "registration timeout", occurrences = 1) intercept {
+      EventFilter.warning(pattern = "registration timeout", occurrences = 1) intercept {
         selector.send(connectionActor, ChannelConnectable)
         clientChannel.socket.getKeepAlive should ===(false)
       }
@@ -647,8 +646,9 @@ class TcpConnectionSpec extends AkkaSpec("""
       run {
         val sel = SelectorProvider.provider().openSelector()
         try {
-          val key = clientSideChannel
-            .register(sel, SelectionKey.OP_CONNECT | SelectionKey.OP_READ)
+          val key = clientSideChannel.register(
+            sel,
+            SelectionKey.OP_CONNECT | SelectionKey.OP_READ)

           // This timeout should be large enough to work on Windows
           sel.select(3000)
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpecSupport.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpecSupport.scala
index 0f44b3d99a3..5060032f967 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpecSupport.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpIntegrationSpecSupport.scala
@@ -21,16 +21,17 @@ trait TcpIntegrationSpecSupport { _: AkkaSpec ⇒
     def bindServer(): Unit = {
       val bindCommander = TestProbe()
-      bindCommander
-        .send(IO(Tcp), Bind(bindHandler.ref, endpoint, options = bindOptions))
+      bindCommander.send(
+        IO(Tcp),
+        Bind(bindHandler.ref, endpoint, options = bindOptions))
       bindCommander.expectMsg(Bound(endpoint))
     }

     def establishNewClientConnection(
     ): (TestProbe, ActorRef, TestProbe, ActorRef) = {
       val connectCommander = TestProbe()
-      connectCommander
-        .send(IO(Tcp), Connect(endpoint, options = connectOptions))
+      connectCommander.send(IO(Tcp),
+                            Connect(endpoint, options = connectOptions))
       val Connected(`endpoint`, localAddress) =
         connectCommander.expectMsgType[Connected]
       val clientHandler = TestProbe()
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpListenerSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpListenerSpec.scala
index fd640b29d19..198a0a2bee7 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpListenerSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/io/TcpListenerSpec.scala
@@ -126,8 +126,7 @@ class TcpListenerSpec extends AkkaSpec("""
       listener ! ChannelAcceptable
       val channel = expectWorkerForCommand

-      EventFilter
-        .warning(pattern = "selector capacity limit", occurrences = 1) intercept {
+      EventFilter.warning(pattern = "selector capacity limit", occurrences = 1) intercept {
        listener ! FailedRegisterIncoming(channel)
        awaitCond(!channel.isOpen)
      }
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerSpec.scala
index 908fddc159d..037d187acde 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/pattern/CircuitBreakerSpec.scala
@@ -209,8 +209,7 @@ class CircuitBreakerSpec extends AkkaSpec with BeforeAndAfter {
       val breaker = CircuitBreakerSpec.shortResetTimeoutCb()
       breaker().withCircuitBreaker(Future(throwException))
       checkLatch(breaker.halfOpenLatch)
-      Await
-        .result(breaker().withCircuitBreaker(Future(sayHi)), awaitTimeout) should ===(
+      Await.result(breaker().withCircuitBreaker(Future(sayHi)), awaitTimeout) should ===(
         "hi")
       checkLatch(breaker.closedLatch)
     }
@@ -239,8 +238,7 @@ class CircuitBreakerSpec extends AkkaSpec with BeforeAndAfter {
   "An asynchronous circuit breaker that is closed" must {
     "allow calls through" in {
       val breaker = CircuitBreakerSpec.longCallTimeoutCb()
-      Await
-        .result(breaker().withCircuitBreaker(Future(sayHi)), awaitTimeout) should ===(
+      Await.result(breaker().withCircuitBreaker(Future(sayHi)), awaitTimeout) should ===(
         "hi")
     }
diff --git a/repos/akka/akka-actor-tests/src/test/scala/akka/routing/MetricsBasedResizerSpec.scala b/repos/akka/akka-actor-tests/src/test/scala/akka/routing/MetricsBasedResizerSpec.scala
index c0a8deb712f..9fd4bba2d63 100644
--- a/repos/akka/akka-actor-tests/src/test/scala/akka/routing/MetricsBasedResizerSpec.scala
+++ b/repos/akka/akka-actor-tests/src/test/scala/akka/routing/MetricsBasedResizerSpec.scala
@@ -253,8 +253,7 @@ class MetricsBasedResizerSpec
       router.sendToAll(await = false) //make sure the routees are still busy after the first batch of messages get processed.

       val before = LocalDateTime.now
-      resizer
-        .reportMessageCount(router.routees, router.msgs.size) //updates the records
+      resizer.reportMessageCount(router.routees, router.msgs.size) //updates the records

       msgs1.foreach(_.second.open()) //process two messages
@@ -283,8 +282,7 @@ class MetricsBasedResizerSpec
       router.sendToAll(await = false) //make sure the routees are still busy after the first batch of messages get processed.
       val before = LocalDateTime.now
-      resizer
-        .reportMessageCount(router.routees, router.msgs.size) //updates the records
+      resizer.reportMessageCount(router.routees, router.msgs.size) //updates the records

       msgs1.foreach(_.second.open()) //process two messages
diff --git a/repos/akka/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala b/repos/akka/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala
index fc358e48b2c..cb1d55894c6 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala
@@ -753,8 +753,8 @@ private[akka] class LocalActorRefProvider private[akka] (
       } else
         ref.getChild(path.iterator) match {
           case Nobody ⇒
-            log
-              .debug("look-up of path sequence [/{}] failed", path.mkString("/"))
+            log.debug("look-up of path sequence [/{}] failed",
+                      path.mkString("/"))
             new EmptyLocalActorRef(system.provider, ref.path / path, eventStream)
           case x ⇒ x
         }
diff --git a/repos/akka/akka-actor/src/main/scala/akka/actor/ActorSystem.scala b/repos/akka/akka-actor/src/main/scala/akka/actor/ActorSystem.scala
index 7917e96bc3e..1ea027c182e 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/actor/ActorSystem.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/actor/ActorSystem.scala
@@ -870,14 +870,12 @@ private[akka] class ActorSystemImpl(
             "Extension instance created as 'null' for extension [" + ext + "]")
         case instance ⇒
-          extensions
-            .replace(ext, inProcessOfRegistration, instance) //Replace our in process signal with the initialized extension
+          extensions.replace(ext, inProcessOfRegistration, instance) //Replace our in process signal with the initialized extension
          instance //Profit!
       }
     } catch {
       case t: Throwable ⇒
-        extensions
-          .replace(ext, inProcessOfRegistration, t) //In case shit hits the fan, remove the inProcess signal
+        extensions.replace(ext, inProcessOfRegistration, t) //In case shit hits the fan, remove the inProcess signal
         throw t //Escalate to caller
     } finally {
       inProcessOfRegistration.countDown //Always notify listeners of the inProcess signal
@@ -939,8 +937,9 @@ private[akka] class ActorSystemImpl(
             case _ ⇒ ""
           }) + " " + (cell.childrenRefs match {
-            case ChildrenContainer
-                  .TerminatingChildrenContainer(_, toDie, reason) ⇒
+            case ChildrenContainer.TerminatingChildrenContainer(_,
+                                                                toDie,
+                                                                reason) ⇒
               "Terminating(" + reason + ")" +
                 (toDie.toSeq.sorted mkString ("\n" + indent + "   |    toDie: ",
diff --git a/repos/akka/akka-actor/src/main/scala/akka/actor/TypedActor.scala b/repos/akka/akka-actor/src/main/scala/akka/actor/TypedActor.scala
index 9203598457c..916eedfe9c2 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/actor/TypedActor.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/actor/TypedActor.scala
@@ -94,8 +94,9 @@ trait TypedActorFactory {
     val ap =
       Props(new akka.actor.TypedActor.TypedActor[R, T](proxyVar, c(), i))
         .withDeploy(props.actorProps.deploy)
-    typedActor
-      .createActorRefProxy(props, proxyVar, actorFactory.actorOf(ap, name))
+    typedActor.createActorRefProxy(props,
+                                   proxyVar,
+                                   actorFactory.actorOf(ap, name))
   }

   /**
diff --git a/repos/akka/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala b/repos/akka/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala
index d06937a5b57..ee9c6aa7b48 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala
@@ -35,8 +35,9 @@ private[akka] trait ReceiveTimeout { this: ActorCell ⇒
       recvtimeout._1 match {
         case f: FiniteDuration ⇒
           recvtimeout._2.cancel() //Cancel any ongoing future
-          val task = system.scheduler
-            .scheduleOnce(f, self, akka.actor.ReceiveTimeout)(this.dispatcher)
+          val task =
+            system.scheduler.scheduleOnce(f, self, akka.actor.ReceiveTimeout)(
+              this.dispatcher)
           receiveTimeoutData = (f, task)
         case _ ⇒ cancelReceiveTimeout()
       } else cancelReceiveTimeout()
diff --git a/repos/akka/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala b/repos/akka/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala
index db21e06eee4..dfb49ba23db 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala
@@ -130,8 +130,10 @@ abstract class MessageDispatcher(
   private final def shutdownSchedule: Int =
     Unsafe.instance.getIntVolatile(this, shutdownScheduleOffset)
   private final def updateShutdownSchedule(expect: Int, update: Int): Boolean =
-    Unsafe.instance
-      .compareAndSwapInt(this, shutdownScheduleOffset, expect, update)
+    Unsafe.instance.compareAndSwapInt(this,
+                                      shutdownScheduleOffset,
+                                      expect,
+                                      update)

   /**
    * Creates and returns a mailbox for the given actor.
diff --git a/repos/akka/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala b/repos/akka/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala
index 1feb94eda01..a6bd97fef6b 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala
@@ -156,8 +156,9 @@ private[akka] abstract class Mailbox(val messageQueue: MessageQueue)
   @inline
   protected final def setStatus(newStatus: Status): Unit =
-    Unsafe.instance
-      .putIntVolatile(this, AbstractMailbox.mailboxStatusOffset, newStatus)
+    Unsafe.instance.putIntVolatile(this,
+                                   AbstractMailbox.mailboxStatusOffset,
+                                   newStatus)

   /**
    * Reduce the suspend count by one. Caller does not need to worry about whether
diff --git a/repos/akka/akka-actor/src/main/scala/akka/io/SimpleDnsManager.scala b/repos/akka/akka-actor/src/main/scala/akka/io/SimpleDnsManager.scala
index 8c65a51f8c3..8084618f584 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/io/SimpleDnsManager.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/io/SimpleDnsManager.scala
@@ -29,11 +29,13 @@ class SimpleDnsManager(val ext: DnsExt)
   private val cleanupTimer = cacheCleanup map { _ ⇒
     val interval = Duration(
-      ext.Settings.ResolverConfig
-        .getDuration("cache-cleanup-interval", TimeUnit.MILLISECONDS),
+      ext.Settings.ResolverConfig.getDuration("cache-cleanup-interval",
+                                              TimeUnit.MILLISECONDS),
       TimeUnit.MILLISECONDS)
-    system.scheduler
-      .schedule(interval, interval, self, SimpleDnsManager.CacheCleanup)
+    system.scheduler.schedule(interval,
+                              interval,
+                              self,
+                              SimpleDnsManager.CacheCleanup)
   }

   override def receive = {
diff --git a/repos/akka/akka-actor/src/main/scala/akka/io/TcpListener.scala b/repos/akka/akka-actor/src/main/scala/akka/io/TcpListener.scala
index 28afce14384..cc15af551cb 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/io/TcpListener.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/io/TcpListener.scala
@@ -59,8 +59,8 @@ private[io] class TcpListener(selectorRouter: ActorRef,
         throw new IllegalArgumentException(
           s"bound to unknown SocketAddress [$x]")
     }
-    channelRegistry
-      .register(channel, if (bind.pullMode) 0 else SelectionKey.OP_ACCEPT)
+    channelRegistry.register(channel,
+                             if (bind.pullMode) 0 else SelectionKey.OP_ACCEPT)
     log.debug("Successfully bound to {}", ret)
     bind.options.foreach {
       case o: Inet.SocketOptionV2 ⇒ o.afterBind(channel.socket)
diff --git a/repos/akka/akka-actor/src/main/scala/akka/io/UdpListener.scala b/repos/akka/akka-actor/src/main/scala/akka/io/UdpListener.scala
index 3fc5b354169..e737cca1ced 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/io/UdpListener.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/io/UdpListener.scala
@@ -84,8 +84,8 @@ private[io] class UdpListener(val udp: UdpExt,
       try {
         channel.close()
         sender() ! Unbound
-        log
-          .debug("Unbound endpoint [{}], stopping listener", bind.localAddress)
+        log.debug("Unbound endpoint [{}], stopping listener",
+                  bind.localAddress)
       } finally context.stop(self)
   }
diff --git a/repos/akka/akka-actor/src/main/scala/akka/pattern/AskSupport.scala b/repos/akka/akka-actor/src/main/scala/akka/pattern/AskSupport.scala
index 88f4b180533..35c2793865f 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/pattern/AskSupport.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/pattern/AskSupport.scala
@@ -558,8 +558,10 @@ private[akka] final class PromiseActorRef private (
   @inline
   private[this] def updateWatchedBy(oldWatchedBy: Set[ActorRef],
                                     newWatchedBy: Set[ActorRef]): Boolean =
-    Unsafe.instance
-      .compareAndSwapObject(this, watchedByOffset, oldWatchedBy, newWatchedBy)
+    Unsafe.instance.compareAndSwapObject(this,
+                                         watchedByOffset,
+                                         oldWatchedBy,
+                                         newWatchedBy)

   @tailrec // Returns false if the Promise is already completed
   private[this] final def addWatcher(watcher: ActorRef): Boolean =
diff --git a/repos/akka/akka-actor/src/main/scala/akka/pattern/BackoffOnRestartSupervisor.scala b/repos/akka/akka-actor/src/main/scala/akka/pattern/BackoffOnRestartSupervisor.scala
index 8d41e560328..18fec1bbaa2 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/pattern/BackoffOnRestartSupervisor.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/pattern/BackoffOnRestartSupervisor.scala
@@ -53,8 +53,10 @@ private class BackoffOnRestartSupervisor(val childProps: Props,
     case Terminated(`childRef`) ⇒
       become(receive)
       child = None
-      val restartDelay = BackoffSupervisor
-        .calculateDelay(restartCount, minBackoff, maxBackoff, randomFactor)
+      val restartDelay = BackoffSupervisor.calculateDelay(restartCount,
+                                                          minBackoff,
+                                                          maxBackoff,
+                                                          randomFactor)
       context.system.scheduler
         .scheduleOnce(restartDelay, self, BackoffSupervisor.StartChild)
       restartCount += 1
diff --git a/repos/akka/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala b/repos/akka/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala
index f4f7f101f60..6eb031751f8 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala
@@ -201,8 +201,8 @@ final case class ConsistentHashingRoutingLogic(
           ConsistentHash(routees.map(ConsistentRoutee(_, selfAddress)), vnodes) // re-hash

         // ignore, don't update, in case of CAS failure
-        consistentHashRef
-          .compareAndSet(oldConsistentHashTuple, (routees, consistentHash))
+        consistentHashRef.compareAndSet(oldConsistentHashTuple,
+                                        (routees, consistentHash))
         consistentHash
       } else oldConsistentHash
     }
diff --git a/repos/akka/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala b/repos/akka/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala
index 883dc6c3564..61921c2d0cb 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/routing/OptimalSizeExploringResizer.scala
@@ -313,8 +313,9 @@ case class DefaultOptimalSizeExploringResizer(
   }

   private def explore(currentSize: PoolSize): Int = {
-    val change = Math
-      .max(1, random.nextInt(Math.ceil(currentSize * exploreStepSize).toInt))
+    val change = Math.max(
+      1,
+      random.nextInt(Math.ceil(currentSize * exploreStepSize).toInt))
     if (random.nextDouble() < chanceOfScalingDownWhenFull) -change
     else change
   }
diff --git a/repos/akka/akka-actor/src/main/scala/akka/util/Index.scala b/repos/akka/akka-actor/src/main/scala/akka/util/Index.scala
index 4ea56f35172..c3cb4086e34 100644
--- a/repos/akka/akka-actor/src/main/scala/akka/util/Index.scala
+++ b/repos/akka/akka-actor/src/main/scala/akka/util/Index.scala
@@ -133,8 +133,7 @@ class Index[K, V](val mapSize: Int, val valueComparator: Comparator[V]) {
       if (set.remove(value)) { //If we can remove the value
         if (set.isEmpty) //and the set becomes empty
-          container
-            .remove(key, emptySet) //We try to remove the key if it's mapped to an empty set
+          container.remove(key, emptySet) //We try to remove the key if it's mapped to an empty set

         true //Remove succeeded
       } else false //Remove failed
@@ -174,8 +173,7 @@ class Index[K, V](val mapSize: Int, val valueComparator: Comparator[V]) {
         if (set.remove(value)) { //If we can remove the value
           if (set.isEmpty) //and the set becomes empty
-            container
-              .remove(e.getKey, emptySet) //We try to remove the key if it's mapped to an empty set
+            container.remove(e.getKey, emptySet) //We try to remove the key if it's mapped to an empty set
         }
       }
     }
diff --git a/repos/akka/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala b/repos/akka/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala
index f6b34533ec3..3371e10f659 100644
--- a/repos/akka/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala
+++ b/repos/akka/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala
@@ -50,8 +50,9 @@ class PersistentActorDeferBenchmark {
     probe = TestProbe()(system)
     storageLocations.foreach(FileUtils.deleteDirectory)

-    persistAsync_defer = system
-      .actorOf(Props(classOf[`persistAsync, defer`], data10k.last), "a-1")
+    persistAsync_defer = system.actorOf(
+      Props(classOf[`persistAsync, defer`], data10k.last),
+      "a-1")
     persistAsync_defer_replyASAP = system.actorOf(
       Props(classOf[`persistAsync, defer, respond ASAP`], data10k.last),
       "a-2")
diff --git a/repos/akka/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala b/repos/akka/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala
index 049116dc660..a223adc10fc 100644
--- a/repos/akka/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala
+++ b/repos/akka/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala
@@ -45,10 +45,12 @@ class PersistentActorThroughputBenchmark {
     actor = system.actorOf(Props(classOf[BaselineActor], data10k.last), "a-1")

-    noPersistPersistentActor = system
-      .actorOf(Props(classOf[NoPersistPersistentActor], data10k.last), "nop-1")
-    persistPersistentActor = system
-      .actorOf(Props(classOf[PersistPersistentActor], data10k.last), "ep-1")
+    noPersistPersistentActor = system.actorOf(
+      Props(classOf[NoPersistPersistentActor], data10k.last),
+      "nop-1")
+    persistPersistentActor = system.actorOf(
+      Props(classOf[PersistPersistentActor], data10k.last),
+      "ep-1")
     persistAsync1PersistentActor = system.actorOf(
       Props(classOf[PersistAsyncPersistentActor], data10k.last),
       "epa-1")
diff --git a/repos/akka/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala b/repos/akka/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala
index a97f0dd4951..6391a8eee3f 100644
--- a/repos/akka/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala
+++ b/repos/akka/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala
@@ -292,8 +292,9 @@ object CamelPath {
   def toUri(actorRef: ActorRef,
             autoAck: Boolean,
             replyTimeout: Duration): String =
-    "%s?autoAck=%s&replyTimeout=%s"
-      .format(actorRef.path.toString, autoAck, replyTimeout.toString)
+    "%s?autoAck=%s&replyTimeout=%s".format(actorRef.path.toString,
+                                           autoAck,
+                                           replyTimeout.toString)
 }

 /**
diff --git a/repos/akka/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala b/repos/akka/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala
index e58bacbb215..b67fde0ffa6 100644
--- a/repos/akka/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala
+++ b/repos/akka/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala
@@ -79,8 +79,9 @@ final case class AdaptiveLoadBalancingRoutingLogic(
               cluster.selfAddress,
               metricsSelector.weights(oldMetrics)))
         // ignore, don't update, in case of CAS failure
-        weightedRouteesRef
-          .compareAndSet(oldValue, (routees, oldMetrics, weightedRoutees))
+        weightedRouteesRef.compareAndSet(
+          oldValue,
+          (routees, oldMetrics, weightedRoutees))
         weightedRoutees
       } else oldWeightedRoutees
     }
diff --git a/repos/akka/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/MetricsCollector.scala b/repos/akka/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/MetricsCollector.scala
index 52dde753f6f..1b02b9d2bbf 100644
--- a/repos/akka/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/MetricsCollector.scala
+++ b/repos/akka/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/MetricsCollector.scala
@@ -182,8 +182,9 @@ class JmxMetricsCollector(address: Address, decayFactor: Double)
    * Creates a new instance each time.
    */
   def heapMax(heap: MemoryUsage): Option[Metric] =
-    Metric
-      .create(name = HeapMemoryMax, value = heap.getMax, decayFactor = None)
+    Metric.create(name = HeapMemoryMax,
+                  value = heap.getMax,
+                  decayFactor = None)

   override def close(): Unit = ()
 }
diff --git a/repos/akka/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala b/repos/akka/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala
index 5d1719a9759..518d0fdc1c7 100644
--- a/repos/akka/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala
+++ b/repos/akka/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala
@@ -38,8 +38,9 @@ object AdaptiveLoadBalancingRouterConfig extends MultiNodeConfig {
       // getMax can be undefined (-1)
       val max = math.max(heap.getMax, heap.getCommitted)
       val used = heap.getUsed
-      log
-        .info("used heap before: [{}] bytes, of max [{}]", used, heap.getMax)
+      log.info("used heap before: [{}] bytes, of max [{}]",
+               used,
+               heap.getMax)
       // allocate 70% of free space
       val allocateBytes = (0.7 * (max - used)).toInt
       val numberOfArrays = allocateBytes / 1024
diff --git a/repos/akka/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardCoordinator.scala b/repos/akka/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardCoordinator.scala
index da804320502..bcca175103c 100644
--- a/repos/akka/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardCoordinator.scala
+++ b/repos/akka/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ShardCoordinator.scala
@@ -498,8 +498,10 @@ abstract class ShardCoordinator(
   var regionTerminationInProgress = Set.empty[ActorRef]

   import context.dispatcher
-  val rebalanceTask = context.system.scheduler
-    .schedule(rebalanceInterval, rebalanceInterval, self, RebalanceTick)
+  val rebalanceTask = context.system.scheduler.schedule(rebalanceInterval,
+                                                        rebalanceInterval,
+                                                        self,
+                                                        RebalanceTick)

   cluster.subscribe(self,
                     initialStateMode = InitialStateAsEvents,
diff --git a/repos/akka/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala b/repos/akka/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala
index 3bc1207f65f..a6b108a4800 100644
--- a/repos/akka/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala
+++ b/repos/akka/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala
@@ -271,8 +271,10 @@ abstract class ClusterShardingSpec(config: ClusterShardingSpecConfig)
       if (settings.stateStoreMode == "persistence")
         ShardCoordinator.props(typeName, settings, allocationStrategy)
       else
-        ShardCoordinator
-          .props(typeName, settings, allocationStrategy, replicator)
+        ShardCoordinator.props(typeName,
+                               settings,
+                               allocationStrategy,
+                               replicator)
     }

   List("counter",
diff --git a/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/client/ClusterClient.scala b/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/client/ClusterClient.scala
index b3754f3d0b7..2613d1fed2d 100644
--- a/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/client/ClusterClient.scala
+++ b/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/client/ClusterClient.scala
@@ -294,8 +294,10 @@ final class ClusterClient(settings: ClusterClientSettings)
   sendGetContacts()

   import context.dispatcher
-  val heartbeatTask = context.system.scheduler
-    .schedule(heartbeatInterval, heartbeatInterval, self, HeartbeatTick)
+  val heartbeatTask = context.system.scheduler.schedule(heartbeatInterval,
+                                                        heartbeatInterval,
+                                                        self,
+                                                        HeartbeatTick)
   var refreshContactsTask: Option[Cancellable] = None
   scheduleRefreshContactsTick(establishingGetContactsInterval)
   self ! RefreshContactsTick
@@ -355,8 +357,9 @@ final class ClusterClient(settings: ClusterClientSettings)
   def active(receptionist: ActorRef): Actor.Receive = {
     case Send(path, msg, localAffinity) ⇒
-      receptionist forward DistributedPubSubMediator
-        .Send(path, msg, localAffinity)
+      receptionist forward DistributedPubSubMediator.Send(path,
+                                                          msg,
+                                                          localAffinity)
     case SendToAll(path, msg) ⇒
       receptionist forward DistributedPubSubMediator.SendToAll(path, msg)
     case Publish(topic, msg) ⇒
diff --git a/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/DistributedPubSubMediator.scala b/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/DistributedPubSubMediator.scala
index e68ed315a3d..a1b535524bd 100644
--- a/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/DistributedPubSubMediator.scala
+++ b/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/pubsub/DistributedPubSubMediator.scala
@@ -326,8 +326,10 @@ object DistributedPubSubMediator {
   trait TopicLike extends Actor {
     import context.dispatcher
     val pruneInterval: FiniteDuration = emptyTimeToLive / 2
-    val pruneTask = context.system.scheduler
-      .schedule(pruneInterval, pruneInterval, self, Prune)
+    val pruneTask = context.system.scheduler.schedule(pruneInterval,
+                                                      pruneInterval,
+                                                      self,
+                                                      Prune)
     var pruneDeadline: Option[Deadline] = None

     var subscribers = Set.empty[ActorRef]
@@ -546,11 +548,15 @@ class DistributedPubSubMediator(settings: DistributedPubSubSettings)
   //Start periodic gossip to random nodes in cluster
   import context.dispatcher
-  val gossipTask = context.system.scheduler
-    .schedule(gossipInterval, gossipInterval, self, GossipTick)
+  val gossipTask = context.system.scheduler.schedule(gossipInterval,
+                                                     gossipInterval,
+                                                     self,
+                                                     GossipTick)
   val pruneInterval: FiniteDuration = removedTimeToLive / 2
-  val pruneTask = context.system.scheduler
-    .schedule(pruneInterval, pruneInterval, self, Prune)
+  val pruneTask = context.system.scheduler.schedule(pruneInterval,
+                                                    pruneInterval,
+                                                    self,
+                                                    Prune)

   var registry: Map[Address, Bucket] =
     Map.empty.withDefault(a ⇒ Bucket(a, 0L, TreeMap.empty))
diff --git a/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonManager.scala b/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonManager.scala
index 2f0ad319f2d..9f21ca049fe 100644
--- a/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonManager.scala
+++ b/repos/akka/akka-cluster-tools/src/main/scala/akka/cluster/singleton/ClusterSingletonManager.scala
@@ -627,9 +627,10 @@ class ClusterSingletonManager(singletonProps: Props,
   def scheduleDelayedMemberRemoved(m: Member): Unit = {
     if (removalMargin > Duration.Zero) {
       log.debug("Schedule DelayedMemberRemoved for [{}]", m.address)
-      context.system.scheduler
-        .scheduleOnce(removalMargin, self, DelayedMemberRemoved(m))(
-          context.dispatcher)
+      context.system.scheduler.scheduleOnce(
+        removalMargin,
+        self,
+        DelayedMemberRemoved(m))(context.dispatcher)
     } else self ! DelayedMemberRemoved(m)
   }
diff --git a/repos/akka/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorRouterSpec.scala b/repos/akka/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorRouterSpec.scala
index 1ab8a8f124c..25615f8fd96 100644
--- a/repos/akka/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorRouterSpec.scala
+++ b/repos/akka/akka-cluster-tools/src/test/scala/akka/cluster/pubsub/DistributedPubSubMediatorRouterSpec.scala
@@ -81,8 +81,10 @@ trait DistributedPubSubMediatorRouterSpec {
                                                testActor)
     expectMsgClass(classOf[DistributedPubSubMediator.SubscribeAck])

-    mediator ! DistributedPubSubMediator
-      .Publish("topic", msg, sendOneMessageToEachGroup = true)
+    mediator ! DistributedPubSubMediator.Publish("topic",
+                                                 msg,
+                                                 sendOneMessageToEachGroup =
+                                                   true)

     expectMsg(msg)
     mediator ! DistributedPubSubMediator.Unsubscribe("topic", testActor)
diff --git a/repos/akka/akka-cluster/src/main/scala/akka/cluster/AutoDown.scala b/repos/akka/akka-cluster/src/main/scala/akka/cluster/AutoDown.scala
index dd26dc36fd3..5ba48d83e53 100644
--- a/repos/akka/akka-cluster/src/main/scala/akka/cluster/AutoDown.scala
+++ b/repos/akka/akka-cluster/src/main/scala/akka/cluster/AutoDown.scala
@@ -125,8 +125,9 @@ private[cluster] abstract class AutoDownBase(
     if (autoDownUnreachableAfter == Duration.Zero) {
       downOrAddPending(node)
     } else {
-      val task = scheduler
-        .scheduleOnce(autoDownUnreachableAfter, self, UnreachableTimeout(node))
+      val task = scheduler.scheduleOnce(autoDownUnreachableAfter,
+                                        self,
+                                        UnreachableTimeout(node))
       scheduledUnreachable += (node -> task)
     }
   }
diff --git a/repos/akka/akka-cluster/src/main/scala/akka/cluster/Cluster.scala b/repos/akka/akka-cluster/src/main/scala/akka/cluster/Cluster.scala
index f39867569f7..da5af2d2680 100644
--- a/repos/akka/akka-cluster/src/main/scala/akka/cluster/Cluster.scala
+++ b/repos/akka/akka-cluster/src/main/scala/akka/cluster/Cluster.scala
@@ -251,8 +251,9 @@ class Cluster(val system: ExtendedActorSystem) extends Extension {
     require(
       to.forall(classOf[ClusterDomainEvent].isAssignableFrom),
       s"subscribe to `akka.cluster.ClusterEvent.ClusterDomainEvent` or subclasses, was [${to.map(_.getName).mkString(", ")}]")
-    clusterCore ! InternalClusterAction
-      .Subscribe(subscriber, initialStateMode, to.toSet)
+    clusterCore ! InternalClusterAction.Subscribe(subscriber,
+                                                  initialStateMode,
+                                                  to.toSet)
   }

   /**
diff --git a/repos/akka/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala b/repos/akka/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala
index 512b1bc3fec..9e2086fb468 100644
--- a/repos/akka/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala
+++ b/repos/akka/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala
@@ -693,8 +693,8 @@ private[cluster] class ClusterCoreDaemon(publisher: ActorRef)
   def receiveGossipStatus(status: GossipStatus): Unit = {
     val from = status.from
-    if (!latestGossip.overview.reachability
-          .isReachable(selfUniqueAddress, from))
+    if (!latestGossip.overview.reachability.isReachable(selfUniqueAddress,
+                                                        from))
       logInfo("Ignoring received gossip status from unreachable [{}] ", from)
     else if (latestGossip.members.forall(_.uniqueAddress != from))
       log.debug(
@@ -741,8 +741,9 @@ private[cluster] class ClusterCoreDaemon(publisher: ActorRef)
           from.address,
           envelope.to)
       Ignored
-    } else if (!localGossip.overview.reachability
-                 .isReachable(selfUniqueAddress, from)) {
+    } else if (!localGossip.overview.reachability.isReachable(
+                 selfUniqueAddress,
+                 from)) {
       logInfo("Ignoring received gossip from unreachable [{}] ", from)
       Ignored
     } else if (localGossip.members.forall(_.uniqueAddress != from)) {
diff --git a/repos/akka/akka-cluster/src/main/scala/akka/cluster/ClusterMetricsCollector.scala b/repos/akka/akka-cluster/src/main/scala/akka/cluster/ClusterMetricsCollector.scala
index c8bbc2542bf..cc57c961bd8 100644
--- a/repos/akka/akka-cluster/src/main/scala/akka/cluster/ClusterMetricsCollector.scala
+++ b/repos/akka/akka-cluster/src/main/scala/akka/cluster/ClusterMetricsCollector.scala
@@ -751,8 +751,9 @@ class JmxMetricsCollector(address: Address, decayFactor: Double)
    * Creates a new instance each time.
*/ def heapMax(heap: MemoryUsage): Option[Metric] = - Metric - .create(name = HeapMemoryMax, value = heap.getMax, decayFactor = None) + Metric.create(name = HeapMemoryMax, + value = heap.getMax, + decayFactor = None) override def close(): Unit = () } diff --git a/repos/akka/akka-cluster/src/main/scala/akka/cluster/Gossip.scala b/repos/akka/akka-cluster/src/main/scala/akka/cluster/Gossip.scala index 9df3c8ed1fc..12778d3f9aa 100644 --- a/repos/akka/akka-cluster/src/main/scala/akka/cluster/Gossip.scala +++ b/repos/akka/akka-cluster/src/main/scala/akka/cluster/Gossip.scala @@ -225,8 +225,7 @@ private[cluster] final case class Gossip( def isSingletonCluster: Boolean = members.size == 1 def member(node: UniqueAddress): Member = { - membersMap - .getOrElse(node, Member.removed(node)) // placeholder for removed member + membersMap.getOrElse(node, Member.removed(node)) // placeholder for removed member } def hasMember(node: UniqueAddress): Boolean = membersMap.contains(node) diff --git a/repos/akka/akka-cluster/src/main/scala/akka/cluster/routing/AdaptiveLoadBalancing.scala b/repos/akka/akka-cluster/src/main/scala/akka/cluster/routing/AdaptiveLoadBalancing.scala index 42a4256f338..a415c7be7b1 100644 --- a/repos/akka/akka-cluster/src/main/scala/akka/cluster/routing/AdaptiveLoadBalancing.scala +++ b/repos/akka/akka-cluster/src/main/scala/akka/cluster/routing/AdaptiveLoadBalancing.scala @@ -90,8 +90,9 @@ final case class AdaptiveLoadBalancingRoutingLogic( cluster.selfAddress, metricsSelector.weights(oldMetrics))) // ignore, don't update, in case of CAS failure - weightedRouteesRef - .compareAndSet(oldValue, (routees, oldMetrics, weightedRoutees)) + weightedRouteesRef.compareAndSet( + oldValue, + (routees, oldMetrics, weightedRoutees)) weightedRoutees } else oldWeightedRoutees } diff --git a/repos/akka/akka-cluster/src/multi-jvm/scala/akka/cluster/StressSpec.scala b/repos/akka/akka-cluster/src/multi-jvm/scala/akka/cluster/StressSpec.scala index aae1e57ffb1..0c6243809bf 100644 --- a/repos/akka/akka-cluster/src/multi-jvm/scala/akka/cluster/StressSpec.scala +++ b/repos/akka/akka-cluster/src/multi-jvm/scala/akka/cluster/StressSpec.scala @@ -297,8 +297,11 @@ private[cluster] object StressMultiJvmSpec extends MultiNodeConfig { } import context.dispatcher - private val reportMetricsTask = context.system.scheduler - .schedule(reportMetricsInterval, reportMetricsInterval, self, ReportTick) + private val reportMetricsTask = context.system.scheduler.schedule( + reportMetricsInterval, + reportMetricsInterval, + self, + ReportTick) // subscribe to ClusterMetricsChanged, re-subscribe when restart override def preStart(): Unit = diff --git a/repos/akka/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/AdaptiveLoadBalancingRouterSpec.scala b/repos/akka/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/AdaptiveLoadBalancingRouterSpec.scala index b6936670cd3..6470630b8bd 100644 --- a/repos/akka/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/AdaptiveLoadBalancingRouterSpec.scala +++ b/repos/akka/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/AdaptiveLoadBalancingRouterSpec.scala @@ -40,8 +40,9 @@ object AdaptiveLoadBalancingRouterMultiJvmSpec extends MultiNodeConfig { // getMax can be undefined (-1) val max = math.max(heap.getMax, heap.getCommitted) val used = heap.getUsed - log - .info("used heap before: [{}] bytes, of max [{}]", used, heap.getMax) + log.info("used heap before: [{}] bytes, of max [{}]", + used, + heap.getMax) // allocate 70% of free space val allocateBytes = (0.7 * (max - 
used)).toInt val numberOfArrays = allocateBytes / 1024 diff --git a/repos/akka/akka-cluster/src/test/scala/akka/cluster/GossipSpec.scala b/repos/akka/akka-cluster/src/test/scala/akka/cluster/GossipSpec.scala index f99283b7e1e..e5469014e3e 100644 --- a/repos/akka/akka-cluster/src/test/scala/akka/cluster/GossipSpec.scala +++ b/repos/akka/akka-cluster/src/test/scala/akka/cluster/GossipSpec.scala @@ -189,10 +189,11 @@ class GossipSpec extends WordSpec with Matchers { "know who is youngest" in { // a2 and e1 is Joining val g1 = - Gossip(members = SortedSet(a2, b1.copyUp(3), e1), - overview = GossipOverview( - reachability = Reachability.empty - .unreachable(a2.uniqueAddress, e1.uniqueAddress))) + Gossip( + members = SortedSet(a2, b1.copyUp(3), e1), + overview = GossipOverview( + reachability = Reachability.empty.unreachable(a2.uniqueAddress, + e1.uniqueAddress))) g1.youngestMember should ===(b1) val g2 = Gossip(members = SortedSet(a2, b1.copyUp(3), e1), diff --git a/repos/akka/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/askExtensions.scala b/repos/akka/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/askExtensions.scala index b311fd59d3c..7b89a133a1f 100644 --- a/repos/akka/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/askExtensions.scala +++ b/repos/akka/akka-contrib/src/main/scala/akka/contrib/circuitbreaker/askExtensions.scala @@ -53,8 +53,9 @@ object Implicits { def askWithCircuitBreaker(circuitBreakerProxy: ActorRef, message: Any)( implicit executionContext: ExecutionContext, timeout: Timeout): Future[Any] = - circuitBreakerProxy - .internalAskWithCircuitBreaker(message, timeout, ActorRef.noSender) + circuitBreakerProxy.internalAskWithCircuitBreaker(message, + timeout, + ActorRef.noSender) /** * Wraps the `ask` method in [[akka.pattern.AskSupport]] method to convert failures connected to the circuit diff --git a/repos/akka/akka-distributed-data/src/main/scala/akka/cluster/ddata/Replicator.scala b/repos/akka/akka-distributed-data/src/main/scala/akka/cluster/ddata/Replicator.scala index a2ef06c799f..31c1dec56fa 100644 --- a/repos/akka/akka-distributed-data/src/main/scala/akka/cluster/ddata/Replicator.scala +++ b/repos/akka/akka-distributed-data/src/main/scala/akka/cluster/ddata/Replicator.scala @@ -593,8 +593,9 @@ object Replicator { mergedRemovedNodePruning = mergedRemovedNodePruning.updated(key, thisValue) case Some(thatValue) ⇒ - mergedRemovedNodePruning = mergedRemovedNodePruning - .updated(key, thisValue merge thatValue) + mergedRemovedNodePruning = mergedRemovedNodePruning.updated( + key, + thisValue merge thatValue) } } @@ -844,16 +845,22 @@ final class Replicator(settings: ReplicatorSettings) //Start periodic gossip to random nodes in cluster import context.dispatcher - val gossipTask = context.system.scheduler - .schedule(gossipInterval, gossipInterval, self, GossipTick) + val gossipTask = context.system.scheduler.schedule(gossipInterval, + gossipInterval, + self, + GossipTick) val notifyTask = context.system.scheduler.schedule(notifySubscribersInterval, notifySubscribersInterval, self, FlushChanges) - val pruningTask = context.system.scheduler - .schedule(pruningInterval, pruningInterval, self, RemovedNodePruningTick) - val clockTask = context.system.scheduler - .schedule(gossipInterval, gossipInterval, self, ClockTick) + val pruningTask = context.system.scheduler.schedule(pruningInterval, + pruningInterval, + self, + RemovedNodePruningTick) + val clockTask = context.system.scheduler.schedule(gossipInterval, + gossipInterval, + self, + ClockTick) val 
serializer = SerializationExtension(context.system).serializerFor(classOf[DataEnvelope]) @@ -1005,8 +1012,9 @@ final class Replicator(settings: ReplicatorSettings) log.debug("Received Update for deleted key [{}]", key) sender() ! e case Failure(e) ⇒ - log - .debug("Received Update for key [{}], failed: {}", key, e.getMessage) + log.debug("Received Update for key [{}], failed: {}", + key, + e.getMessage) sender() ! ModifyFailure(key, "Update failed: " + e.getMessage, e, req) } } @@ -1331,8 +1339,9 @@ final class Replicator(settings: ReplicatorSettings) def init(): Unit = { val newEnvelope = envelope.initRemovedNodePruning(removed, selfUniqueAddress) - log - .debug("Initiated pruning of [{}] for data key [{}]", removed, key) + log.debug("Initiated pruning of [{}] for data key [{}]", + removed, + key) setData(key, newEnvelope) } @@ -1419,8 +1428,8 @@ final class Replicator(settings: ReplicatorSettings) val pruningCleanuped = pruningCleanupTombstoned(removed, envelope.data) if ((pruningCleanuped ne envelope.data) || envelope.pruning.contains(removed)) - envelope - .copy(data = pruningCleanuped, pruning = envelope.pruning - removed) + envelope.copy(data = pruningCleanuped, + pruning = envelope.pruning - removed) else envelope } diff --git a/repos/akka/akka-docs/rst/scala/code/docs/actor/SchedulerDocSpec.scala b/repos/akka/akka-docs/rst/scala/code/docs/actor/SchedulerDocSpec.scala index 453e2fb42dc..a1f263efeb0 100644 --- a/repos/akka/akka-docs/rst/scala/code/docs/actor/SchedulerDocSpec.scala +++ b/repos/akka/akka-docs/rst/scala/code/docs/actor/SchedulerDocSpec.scala @@ -51,8 +51,10 @@ class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { //This will schedule to send the Tick-message //to the tickActor after 0ms repeating every 50ms - val cancellable = system.scheduler - .schedule(0 milliseconds, 50 milliseconds, tickActor, Tick) + val cancellable = system.scheduler.schedule(0 milliseconds, + 50 milliseconds, + tickActor, + Tick) //This cancels further Ticks to be sent cancellable.cancel() diff --git a/repos/akka/akka-docs/rst/scala/code/docs/dispatcher/DispatcherDocSpec.scala b/repos/akka/akka-docs/rst/scala/code/docs/dispatcher/DispatcherDocSpec.scala index 9fbfdc8de9a..e32302d2c5f 100644 --- a/repos/akka/akka-docs/rst/scala/code/docs/dispatcher/DispatcherDocSpec.scala +++ b/repos/akka/akka-docs/rst/scala/code/docs/dispatcher/DispatcherDocSpec.scala @@ -273,8 +273,9 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) { val context = system //#defining-dispatcher-in-code import akka.actor.Props - val myActor = context - .actorOf(Props[MyActor].withDispatcher("my-dispatcher"), "myactor1") + val myActor = + context.actorOf(Props[MyActor].withDispatcher("my-dispatcher"), + "myactor1") //#defining-dispatcher-in-code } diff --git a/repos/akka/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala b/repos/akka/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala index 7e29cfc6774..6eb04283307 100644 --- a/repos/akka/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala +++ b/repos/akka/akka-docs/rst/scala/code/docs/http/scaladsl/server/WebSocketExampleSpec.scala @@ -53,8 +53,9 @@ class WebSocketExampleSpec extends WordSpec with Matchers { } //#websocket-request-handling - val bindingFuture = Http() - .bindAndHandleSync(requestHandler, interface = "localhost", port = 8080) + val bindingFuture = Http().bindAndHandleSync(requestHandler, + interface = "localhost", + port = 8080) println( s"Server 
online at http://localhost:8080/\nPress RETURN to stop...") diff --git a/repos/akka/akka-docs/rst/scala/code/docs/persistence/query/MyEventsByTagPublisher.scala b/repos/akka/akka-docs/rst/scala/code/docs/persistence/query/MyEventsByTagPublisher.scala index ded005ed8bc..284c41a8a4a 100644 --- a/repos/akka/akka-docs/rst/scala/code/docs/persistence/query/MyEventsByTagPublisher.scala +++ b/repos/akka/akka-docs/rst/scala/code/docs/persistence/query/MyEventsByTagPublisher.scala @@ -35,8 +35,10 @@ class MyEventsByTagPublisher(tag: String, var buf = Vector.empty[EventEnvelope] import context.dispatcher - val continueTask = context.system.scheduler - .schedule(refreshInterval, refreshInterval, self, Continue) + val continueTask = context.system.scheduler.schedule(refreshInterval, + refreshInterval, + self, + Continue) override def postStop(): Unit = { continueTask.cancel() diff --git a/repos/akka/akka-docs/rst/scala/code/docs/routing/RouterDocSpec.scala b/repos/akka/akka-docs/rst/scala/code/docs/routing/RouterDocSpec.scala index ac0a00a5ee5..e448234c03f 100644 --- a/repos/akka/akka-docs/rst/scala/code/docs/routing/RouterDocSpec.scala +++ b/repos/akka/akka-docs/rst/scala/code/docs/routing/RouterDocSpec.scala @@ -452,8 +452,9 @@ router-dispatcher {} //#consistent-hashing-pool-1 //#consistent-hashing-pool-2 - val router26: ActorRef = context - .actorOf(ConsistentHashingPool(5).props(Props[Worker]), "router26") + val router26: ActorRef = context.actorOf( + ConsistentHashingPool(5).props(Props[Worker]), + "router26") //#consistent-hashing-pool-2 //#consistent-hashing-group-1 diff --git a/repos/akka/akka-docs/rst/scala/code/docs/stream/FlowDocSpec.scala b/repos/akka/akka-docs/rst/scala/code/docs/stream/FlowDocSpec.scala index cc92e80e51e..b6a4c0cec2a 100644 --- a/repos/akka/akka-docs/rst/scala/code/docs/stream/FlowDocSpec.scala +++ b/repos/akka/akka-docs/rst/scala/code/docs/stream/FlowDocSpec.scala @@ -79,8 +79,9 @@ class FlowDocSpec extends AkkaSpec { import scala.concurrent.duration._ case object Tick - val timer = Source - .tick(initialDelay = 1.second, interval = 1.seconds, tick = () => Tick) + val timer = Source.tick(initialDelay = 1.second, + interval = 1.seconds, + tick = () => Tick) val timerCancel: Cancellable = Sink.ignore.runWith(timer) timerCancel.cancel() diff --git a/repos/akka/akka-docs/rst/scala/code/docs/stream/FlowGraphDocSpec.scala b/repos/akka/akka-docs/rst/scala/code/docs/stream/FlowGraphDocSpec.scala index 6f1be099556..48882734173 100644 --- a/repos/akka/akka-docs/rst/scala/code/docs/stream/FlowGraphDocSpec.scala +++ b/repos/akka/akka-docs/rst/scala/code/docs/stream/FlowGraphDocSpec.scala @@ -215,8 +215,7 @@ class FlowGraphDocSpec extends AkkaSpec { }) //#flow-graph-matvalue - Await - .result(Source(1 to 10).via(foldFlow).runWith(Sink.head), 3.seconds) should ===( + Await.result(Source(1 to 10).via(foldFlow).runWith(Sink.head), 3.seconds) should ===( 55) //#flow-graph-matvalue-cycle diff --git a/repos/akka/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeParseLines.scala b/repos/akka/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeParseLines.scala index 064dd70197d..968ad5e2612 100644 --- a/repos/akka/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeParseLines.scala +++ b/repos/akka/akka-docs/rst/scala/code/docs/stream/cookbook/RecipeParseLines.scala @@ -30,8 +30,7 @@ class RecipeParseLines extends RecipeSpec { .map(_.utf8String) //#parse-lines - Await - .result(linesStream.limit(10).runWith(Sink.seq), 3.seconds) should be( + 
Await.result(linesStream.limit(10).runWith(Sink.seq), 3.seconds) should be( List("Hello World\r!", "Hello Akka!", "Hello Streams!", "")) } } diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolInterfaceActor.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolInterfaceActor.scala index 0d3c13a2613..40f255cb1e1 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolInterfaceActor.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/client/PoolInterfaceActor.scala @@ -60,8 +60,9 @@ private class PoolInterfaceActor( Buffer[PoolRequest](hcps.setup.settings.maxOpenRequests, fm) private[this] var activeIdleTimeout: Option[Cancellable] = None - log - .debug("(Re-)starting host connection pool to {}:{}", hcps.host, hcps.port) + log.debug("(Re-)starting host connection pool to {}:{}", + hcps.host, + hcps.port) initConnectionFlow() diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala index a73ac09773b..388e9e809db 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpHeaderParser.scala @@ -606,8 +606,9 @@ private[http] object HttpHeaderParser { valueStart, valueStart + maxHeaderValueLength + 2)() val trimmedHeaderValue = headerValue.trim - val header = HeaderParser - .parseFull(headerName, trimmedHeaderValue, settings) match { + val header = HeaderParser.parseFull(headerName, + trimmedHeaderValue, + settings) match { case Right(h) ⇒ h case Left(error) ⇒ onIllegalHeader( diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala index 6d1076703bb..25f5e4b4852 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala @@ -77,10 +77,10 @@ private[http] object HttpServerBluePrint { SslTlsInbound, SessionBytes, NotUsed] = - BidiFlow - .fromFlows(Flow[ByteString].map(SendBytes), Flow[SslTlsInbound].collect { - case x: SessionBytes ⇒ x - }) + BidiFlow.fromFlows(Flow[ByteString].map(SendBytes), + Flow[SslTlsInbound].collect { + case x: SessionBytes ⇒ x + }) def websocketSupport(settings: ServerSettings, log: LoggingAdapter): BidiFlow[ResponseRenderingOutput, diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameOutHandler.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameOutHandler.scala index 80193d1a7e4..eb205c5e116 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameOutHandler.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/ws/FrameOutHandler.scala @@ -38,16 +38,18 @@ private[http] class FrameOutHandler(serverSide: Boolean, .closeFrame(code.getOrElse(Protocol.CloseCodes.Regular), reason))) ctx.pull() case PeerClosed(code, reason) ⇒ - val closeFrame = FrameEvent - .closeFrame(code.getOrElse(Protocol.CloseCodes.Regular), reason) + val closeFrame = FrameEvent.closeFrame( + code.getOrElse(Protocol.CloseCodes.Regular), + reason) if (serverSide) ctx.pushAndFinish(closeFrame) else { become(new 
WaitingForTransportClose) ctx.push(closeFrame) } case ActivelyCloseWithCode(code, reason) ⇒ - val closeFrame = FrameEvent - .closeFrame(code.getOrElse(Protocol.CloseCodes.Regular), reason) + val closeFrame = FrameEvent.closeFrame( + code.getOrElse(Protocol.CloseCodes.Regular), + reason) become(new WaitingForPeerCloseFrame()) ctx.push(closeFrame) case UserHandlerCompleted ⇒ diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Masking.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Masking.scala index ae8ff41103a..79aecc09695 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Masking.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/engine/ws/Masking.scala @@ -71,8 +71,8 @@ private[http] object Masking { try { val mask = extractMask(header) become(new Running(mask)) - current - .onPush(start.copy(header = setNewMask(header, mask)), ctx) + current.onPush(start.copy(header = setNewMask(header, mask)), + ctx) } catch { case p: ProtocolException ⇒ become(Done) diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/settings/ClientConnectionSettingsImpl.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/settings/ClientConnectionSettingsImpl.scala index 8ea4547cd33..65c91c76a89 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/impl/settings/ClientConnectionSettingsImpl.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/impl/settings/ClientConnectionSettingsImpl.scala @@ -43,8 +43,9 @@ object ClientConnectionSettingsImpl idleTimeout = c getPotentiallyInfiniteDuration "idle-timeout", requestHeaderSizeHint = c getIntBytes "request-header-size-hint", websocketRandomFactory = Randoms.SecureRandomInstances, // can currently only be overridden from code - socketOptions = SocketOptionSettings - .fromSubConfig(root, c.getConfig("socket-options")), + socketOptions = + SocketOptionSettings.fromSubConfig(root, + c.getConfig("socket-options")), parserSettings = ParserSettingsImpl.fromSubConfig(root, c.getConfig("parsing"))) } diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala index d87a564da9e..c3c37726f5a 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/Http.scala @@ -710,8 +710,7 @@ class HttpExt(private val config: Config)(implicit val system: ActorSystem) throw e } val fastFuture = FastFuture.successful(gateway) - hostPoolCache - .put(setup, fastFuture) // optimize subsequent gateway accesses + hostPoolCache.put(setup, fastFuture) // optimize subsequent gateway accesses gatewayPromise.success(gateway) // satisfy everyone who got a hold of our promise while we were starting up whenShuttingDown.future.onComplete(_ ⇒ hostPoolCache.remove(setup, fastFuture))(fm.executionContext) diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpHeader.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpHeader.scala index 81c42594a20..97a4ce84b2a 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpHeader.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpHeader.scala @@ -65,8 +65,9 @@ object HttpHeader { parser.`header-field-value`.run() match { case Success(preProcessedValue) ⇒ try { - HeaderParser - .parseFull(name.toLowerCase, preProcessedValue, settings) 
match { + HeaderParser.parseFull(name.toLowerCase, + preProcessedValue, + settings) match { case Right(header) ⇒ ParsingResult.Ok(header, Nil) case Left(info) ⇒ val errors = diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala index cf7f8fa80a7..681e246705c 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/HttpMessage.scala @@ -188,8 +188,10 @@ final class HttpRequest(val method: HttpMethod, */ def effectiveUri(securedConnection: Boolean, defaultHostHeader: Host = Host.empty): Uri = - HttpRequest - .effectiveUri(uri, headers, securedConnection, defaultHostHeader) + HttpRequest.effectiveUri(uri, + headers, + securedConnection, + defaultHostHeader) /** * Returns a copy of this request with the URI resolved according to the logic defined at diff --git a/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala b/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala index 35f1def0abe..0613f16fcf4 100644 --- a/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala +++ b/repos/akka/akka-http-core/src/main/scala/akka/http/scaladsl/model/Multipart.scala @@ -67,8 +67,8 @@ sealed trait Multipart extends jm.Multipart { partHeadersSizeHint = 128, log)) .flatMapConcat(ConstantFun.scalaIdentityFunction) - HttpEntity - .Chunked(mediaType withBoundary boundary withCharset charset, chunks) + HttpEntity.Chunked(mediaType withBoundary boundary withCharset charset, + chunks) } /** Java API */ diff --git a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/client/ConnectionPoolSpec.scala b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/client/ConnectionPoolSpec.scala index 017d819181b..dfe6cdcd2d7 100644 --- a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/client/ConnectionPoolSpec.scala +++ b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/client/ConnectionPoolSpec.scala @@ -256,8 +256,9 @@ class ConnectionPoolSpec val (_, _, _, hcp) = cachedHostConnectionPool[Int](idleTimeout = 1.second) val gateway = Await.result(hcp.gatewayFuture, 500.millis) - val PoolGateway - .Running(_, shutdownStartedPromise, shutdownCompletedPromise) = + val PoolGateway.Running(_, + shutdownStartedPromise, + shutdownCompletedPromise) = gateway.currentState shutdownStartedPromise.isCompleted shouldEqual false shutdownCompletedPromise.isCompleted shouldEqual false @@ -331,8 +332,9 @@ class ConnectionPoolSpec autoAccept = true) { val (serverEndpoint2, serverHostName2, serverPort2) = TestUtils.temporaryServerHostnameAndPort() - Http() - .bindAndHandleSync(testServerHandler(0), serverHostName2, serverPort2) + Http().bindAndHandleSync(testServerHandler(0), + serverHostName2, + serverPort2) val (requestIn, responseOut, responseOutSub, hcp) = superPool[Int]() diff --git a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala index 82d3e97d419..9dd18d41d01 100644 --- a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala +++ b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/HttpHeaderParserSpec.scala @@ -287,8 +287,9 @@ class HttpHeaderParserSpec } def 
insert(line: String, value: AnyRef): Unit = if (parser.isEmpty) - HttpHeaderParser - .insertRemainingCharsAsNewNodes(parser, ByteString(line), value) + HttpHeaderParser.insertRemainingCharsAsNewNodes(parser, + ByteString(line), + value) else HttpHeaderParser.insert(parser, ByteString(line), value) def parseLine(line: String) = diff --git a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala index 6d2989c53d1..d05983140bf 100644 --- a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala +++ b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala @@ -294,9 +294,10 @@ class ResponseParserSpec override def equals(other: scala.Any): Boolean = other match { case other: StrictEqualHttpResponse ⇒ this.resp.copy(entity = HttpEntity.Empty) == other.resp.copy( - entity = HttpEntity.Empty) && Await - .result(this.resp.entity.toStrict(250.millis), 250.millis) == Await - .result(other.resp.entity.toStrict(250.millis), 250.millis) + entity = HttpEntity.Empty) && Await.result( + this.resp.entity.toStrict(250.millis), + 250.millis) == Await.result(other.resp.entity.toStrict(250.millis), + 250.millis) } override def toString = resp.toString diff --git a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSServerAutobahnTest.scala b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSServerAutobahnTest.scala index 66ad93dc593..a12e802752b 100644 --- a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSServerAutobahnTest.scala +++ b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WSServerAutobahnTest.scala @@ -28,23 +28,22 @@ object WSServerAutobahnTest extends App { try { val binding = - Http() - .bindAndHandleSync( - { - case req @ HttpRequest(GET, Uri.Path("/"), _, _, _) - if req.header[UpgradeToWebSocket].isDefined ⇒ - req.header[UpgradeToWebSocket] match { - case Some(upgrade) ⇒ - upgrade - .handleMessages(echoWebSocketService) // needed for running the autobahn test suite - case None ⇒ - HttpResponse(400, entity = "Not a valid websocket request!") - } - case _: HttpRequest ⇒ - HttpResponse(404, entity = "Unknown resource!") - }, - interface = host, // adapt to your docker host IP address if necessary - port = port) + Http().bindAndHandleSync( + { + case req @ HttpRequest(GET, Uri.Path("/"), _, _, _) + if req.header[UpgradeToWebSocket].isDefined ⇒ + req.header[UpgradeToWebSocket] match { + case Some(upgrade) ⇒ + upgrade + .handleMessages(echoWebSocketService) // needed for running the autobahn test suite + case None ⇒ + HttpResponse(400, entity = "Not a valid websocket request!") + } + case _: HttpRequest ⇒ + HttpResponse(404, entity = "Unknown resource!") + }, + interface = host, // adapt to your docker host IP address if necessary + port = port) Await.result(binding, 3.second) // throws if binding fails println(s"Server online at http://${host}:${port}") diff --git a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/model/parser/HttpHeaderSpec.scala b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/model/parser/HttpHeaderSpec.scala index f13de513e78..6b7dbcfe799 100644 --- a/repos/akka/akka-http-core/src/test/scala/akka/http/impl/model/parser/HttpHeaderSpec.scala +++ b/repos/akka/akka-http-core/src/test/scala/akka/http/impl/model/parser/HttpHeaderSpec.scala @@ -896,12 +896,14 @@ class HttpHeaderSpec 
extends FreeSpec with Matchers { Nil) } "compress value whitespace into single spaces and trim" in { - parse("Foo", " b a \tr\t") shouldEqual ParsingResult - .Ok(RawHeader("Foo", "b a r"), Nil) + parse("Foo", " b a \tr\t") shouldEqual ParsingResult.Ok( + RawHeader("Foo", "b a r"), + Nil) } "resolve obs-fold occurrences" in { - parse("Foo", "b\r\n\ta \r\n r") shouldEqual ParsingResult - .Ok(RawHeader("Foo", "b a r"), Nil) + parse("Foo", "b\r\n\ta \r\n r") shouldEqual ParsingResult.Ok( + RawHeader("Foo", "b a r"), + Nil) } "parse with custom uri parsing mode" in { diff --git a/repos/akka/akka-http-core/src/test/scala/akka/http/javadsl/HttpExtensionApiSpec.scala b/repos/akka/akka-http-core/src/test/scala/akka/http/javadsl/HttpExtensionApiSpec.scala index 6d354098573..417e2966db1 100644 --- a/repos/akka/akka-http-core/src/test/scala/akka/http/javadsl/HttpExtensionApiSpec.scala +++ b/repos/akka/akka-http-core/src/test/scala/akka/http/javadsl/HttpExtensionApiSpec.scala @@ -579,8 +579,9 @@ class HttpExtensionApiSpec def runServer(): (Host, Port, ServerBinding) = { val (_, host, port) = TestUtils.temporaryServerHostnameAndPort() - val server = http - .bindAndHandleSync(httpSuccessFunction, toHost(host, port), materializer) + val server = http.bindAndHandleSync(httpSuccessFunction, + toHost(host, port), + materializer) (host, port, waitFor(server)) } diff --git a/repos/akka/akka-http-core/src/test/scala/akka/http/scaladsl/ClientServerSpec.scala b/repos/akka/akka-http-core/src/test/scala/akka/http/scaladsl/ClientServerSpec.scala index c790f413b8b..3002ebb12c9 100644 --- a/repos/akka/akka-http-core/src/test/scala/akka/http/scaladsl/ClientServerSpec.scala +++ b/repos/akka/akka-http-core/src/test/scala/akka/http/scaladsl/ClientServerSpec.scala @@ -204,8 +204,10 @@ class ClientServerSpec Promise().future // never complete the request with a response; we're waiting for the timeout to happen, nothing else } - val binding = Http() - .bindAndHandleAsync(handle, hostname, port, settings = settings) + val binding = Http().bindAndHandleAsync(handle, + hostname, + port, + settings = settings) val b1 = Await.result(binding, 3.seconds) (receivedRequest, b1) } diff --git a/repos/akka/akka-http-core/src/test/scala/akka/http/scaladsl/model/UriSpec.scala b/repos/akka/akka-http-core/src/test/scala/akka/http/scaladsl/model/UriSpec.scala index 78968eec7ea..841eeed5458 100644 --- a/repos/akka/akka-http-core/src/test/scala/akka/http/scaladsl/model/UriSpec.scala +++ b/repos/akka/akka-http-core/src/test/scala/akka/http/scaladsl/model/UriSpec.scala @@ -437,8 +437,10 @@ class UriSpec extends WordSpec with Matchers { // http://tools.ietf.org/html/rfc3986#section-1.1.2 "be correctly parsed from and rendered to simple test examples" in { - Uri("ftp://ftp.is.co.za/rfc/rfc1808.txt") shouldEqual Uri - .from(scheme = "ftp", host = "ftp.is.co.za", path = "/rfc/rfc1808.txt") + Uri("ftp://ftp.is.co.za/rfc/rfc1808.txt") shouldEqual Uri.from( + scheme = "ftp", + host = "ftp.is.co.za", + path = "/rfc/rfc1808.txt") Uri("http://www.ietf.org/rfc/rfc2396.txt") shouldEqual Uri.from( scheme = "http", @@ -451,11 +453,13 @@ class UriSpec extends WordSpec with Matchers { path = "/c=GB", queryString = Some("objectClass?one")) - Uri("mailto:John.Doe@example.com") shouldEqual Uri - .from(scheme = "mailto", path = "John.Doe@example.com") + Uri("mailto:John.Doe@example.com") shouldEqual Uri.from( + scheme = "mailto", + path = "John.Doe@example.com") - Uri("news:comp.infosystems.www.servers.unix") shouldEqual Uri - .from(scheme = "news", path = 
"comp.infosystems.www.servers.unix") + Uri("news:comp.infosystems.www.servers.unix") shouldEqual Uri.from( + scheme = "news", + path = "comp.infosystems.www.servers.unix") Uri("tel:+1-816-555-1212") shouldEqual Uri.from(scheme = "tel", path = "+1-816-555-1212") diff --git a/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/coding/Encoder.scala b/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/coding/Encoder.scala index b8ccf1b687c..4e6427c6cf1 100644 --- a/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/coding/Encoder.scala +++ b/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/coding/Encoder.scala @@ -26,8 +26,8 @@ trait Encoder { else message.self def encodeData[T](t: T)(implicit mapper: DataMapper[T]): T = - mapper - .transformDataBytes(t, Flow[ByteString].transform(newEncodeTransformer)) + mapper.transformDataBytes(t, + Flow[ByteString].transform(newEncodeTransformer)) def encode(input: ByteString): ByteString = newCompressor.compressAndFinish(input) diff --git a/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/Directive.scala b/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/Directive.scala index cf10d36b5e8..8a39cfeb4b4 100644 --- a/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/Directive.scala +++ b/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/Directive.scala @@ -175,8 +175,8 @@ object Directive { underlying.filter(predicate, rejections: _*).tflatMap(_ ⇒ Empty) def filter(predicate: T ⇒ Boolean, rejections: Rejection*): Directive1[T] = - underlying - .tfilter({ case Tuple1(value) ⇒ predicate(value) }, rejections: _*) + underlying.tfilter({ case Tuple1(value) ⇒ predicate(value) }, + rejections: _*) } } diff --git a/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/ExceptionHandler.scala b/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/ExceptionHandler.scala index 0e1408a5dc9..f8d4e62e8aa 100644 --- a/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/ExceptionHandler.scala +++ b/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/ExceptionHandler.scala @@ -58,8 +58,9 @@ object ExceptionHandler { case NonFatal(e) ⇒ ctx ⇒ { - ctx.log - .error(e, "Error during processing of request {}", ctx.request) + ctx.log.error(e, + "Error during processing of request {}", + ctx.request) ctx.complete(InternalServerError) } } diff --git a/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ExecutionDirectives.scala b/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ExecutionDirectives.scala index be6707161ca..4d98a5dff80 100644 --- a/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ExecutionDirectives.scala +++ b/repos/akka/akka-http/src/main/scala/akka/http/scaladsl/server/directives/ExecutionDirectives.scala @@ -26,8 +26,8 @@ trait ExecutionDirectives { try innerRouteBuilder(())(ctx).fast.recoverWith(handleException) catch { case NonFatal(e) ⇒ - handleException - .applyOrElse[Throwable, Future[RouteResult]](e, throw _) + handleException.applyOrElse[Throwable, Future[RouteResult]](e, + throw _) } } diff --git a/repos/akka/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Player.scala b/repos/akka/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Player.scala index 1883e25dedc..098691f592d 100644 --- a/repos/akka/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Player.scala +++ 
b/repos/akka/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Player.scala @@ -362,8 +362,9 @@ private[akka] class PlayerHandler( log.debug("channel {} unbound", event.getChannel) override def writeComplete(ctx: ChannelHandlerContext, event: WriteCompletionEvent) = - log - .debug("channel {} written {}", event.getChannel, event.getWrittenAmount) + log.debug("channel {} written {}", + event.getChannel, + event.getWrittenAmount) override def exceptionCaught(ctx: ChannelHandlerContext, event: ExceptionEvent) = { diff --git a/repos/akka/akka-multi-node-testkit/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala b/repos/akka/akka-multi-node-testkit/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala index f269a4f81f0..84c92fa92d1 100644 --- a/repos/akka/akka-multi-node-testkit/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala +++ b/repos/akka/akka-multi-node-testkit/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala @@ -283,8 +283,8 @@ abstract class MultiNodeSpec(val myself: RoleName, implicit def awaitHelper[T](w: Awaitable[T]) = new AwaitHelper(w) class AwaitHelper[T](w: Awaitable[T]) { def await: T = - Await - .result(w, remainingOr(testConductor.Settings.QueryTimeout.duration)) + Await.result(w, + remainingOr(testConductor.Settings.QueryTimeout.duration)) } final override def multiNodeSpecBeforeAll { diff --git a/repos/akka/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdPublisher.scala b/repos/akka/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdPublisher.scala index b597d781f62..370c2fb6a48 100644 --- a/repos/akka/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdPublisher.scala +++ b/repos/akka/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdPublisher.scala @@ -162,9 +162,11 @@ private[akka] class LiveEventsByPersistenceIdPublisher( writeJournalPluginId) { import EventsByPersistenceIdPublisher._ - val tickTask = context.system.scheduler - .schedule(refreshInterval, refreshInterval, self, Continue)( - context.dispatcher) + val tickTask = + context.system.scheduler.schedule(refreshInterval, + refreshInterval, + self, + Continue)(context.dispatcher) override def postStop(): Unit = tickTask.cancel() diff --git a/repos/akka/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagPublisher.scala b/repos/akka/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagPublisher.scala index 52ab50960a9..6ba392ab6de 100644 --- a/repos/akka/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagPublisher.scala +++ b/repos/akka/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagPublisher.scala @@ -159,9 +159,11 @@ private[akka] class LiveEventsByTagPublisher(tag: String, writeJournalPluginId) { import EventsByTagPublisher._ - val tickTask = context.system.scheduler - .schedule(refreshInterval, refreshInterval, self, Continue)( - context.dispatcher) + val tickTask = + context.system.scheduler.schedule(refreshInterval, + refreshInterval, + self, + Continue)(context.dispatcher) override def postStop(): Unit = tickTask.cancel() diff --git a/repos/akka/akka-persistence/src/main/scala/akka/persistence/Persistence.scala b/repos/akka/akka-persistence/src/main/scala/akka/persistence/Persistence.scala index e3593e90826..9167bfd00ec 100644 --- 
a/repos/akka/akka-persistence/src/main/scala/akka/persistence/Persistence.scala +++ b/repos/akka/akka-persistence/src/main/scala/akka/persistence/Persistence.scala @@ -317,8 +317,7 @@ class Persistence(val system: ExtendedActorSystem) extends Extension { system.dynamicAccess.getClassFor[Any](pluginClassName).get val pluginDispatcherId = pluginConfig.getString("plugin-dispatcher") val pluginActorArgs = try { - Reflect - .findConstructor(pluginClass, List(pluginConfig)) // will throw if not found + Reflect.findConstructor(pluginClass, List(pluginConfig)) // will throw if not found List(pluginConfig) } catch { case NonFatal(_) ⇒ Nil } // otherwise use empty constructor val pluginActorProps = Props(Deploy(dispatcher = pluginDispatcherId), diff --git a/repos/akka/akka-persistence/src/main/scala/akka/persistence/journal/PersistencePluginProxy.scala b/repos/akka/akka-persistence/src/main/scala/akka/persistence/journal/PersistencePluginProxy.scala index dcffd43f660..00dc16cce26 100644 --- a/repos/akka/akka-persistence/src/main/scala/akka/persistence/journal/PersistencePluginProxy.scala +++ b/repos/akka/akka-persistence/src/main/scala/akka/persistence/journal/PersistencePluginProxy.scala @@ -129,8 +129,8 @@ final class PersistencePluginProxy(config: Config) } } - context.system.scheduler - .scheduleOnce(initTimeout, self, InitTimeout)(context.dispatcher) + context.system.scheduler.scheduleOnce(initTimeout, self, InitTimeout)( + context.dispatcher) } } diff --git a/repos/akka/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala b/repos/akka/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala index 007347ce948..c1f2d41c61a 100644 --- a/repos/akka/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala +++ b/repos/akka/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala @@ -159,8 +159,8 @@ private[persistence] trait LeveldbStore batch: WriteBatch): Unit = { val persistentBytes = persistentToBytes(persistent) val nid = numericId(persistent.persistenceId) - batch - .put(keyToBytes(counterKey(nid)), counterToBytes(persistent.sequenceNr)) + batch.put(keyToBytes(counterKey(nid)), + counterToBytes(persistent.sequenceNr)) batch.put(keyToBytes(Key(nid, persistent.sequenceNr, 0)), persistentBytes) tags.foreach { tag ⇒ diff --git a/repos/akka/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala b/repos/akka/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala index 78cbfb1d795..127830811dd 100644 --- a/repos/akka/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala +++ b/repos/akka/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala @@ -150,8 +150,9 @@ object AtLeastOnceDeliverySpec { case a @ Action(id, payload) ⇒ // discard duplicates (naive impl) if (!allReceived.contains(id)) { - log - .debug("Destination got {}, all count {}", a, allReceived.size + 1) + log.debug("Destination got {}, all count {}", + a, + allReceived.size + 1) testActor ! 
a allReceived += id } diff --git a/repos/akka/akka-persistence/src/test/scala/akka/persistence/PersistentActorBoundedStashingSpec.scala b/repos/akka/akka-persistence/src/test/scala/akka/persistence/PersistentActorBoundedStashingSpec.scala index e82a4882b38..7aa2a4f3365 100644 --- a/repos/akka/akka-persistence/src/test/scala/akka/persistence/PersistentActorBoundedStashingSpec.scala +++ b/repos/akka/akka-persistence/src/test/scala/akka/persistence/PersistentActorBoundedStashingSpec.scala @@ -51,8 +51,9 @@ object PersistentActorBoundedStashingSpec { |akka.persistence.internal-stash-overflow-strategy = "%s" |""".stripMargin - val throwConfig = String - .format(templateConfig, "akka.persistence.ThrowExceptionConfigurator") + val throwConfig = String.format( + templateConfig, + "akka.persistence.ThrowExceptionConfigurator") val discardConfig = String.format(templateConfig, "akka.persistence.DiscardConfigurator") val replyToConfig = String.format( diff --git a/repos/akka/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala b/repos/akka/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala index 6312c952f10..548105fdf83 100644 --- a/repos/akka/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala +++ b/repos/akka/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala @@ -345,8 +345,7 @@ class PersistentActorFailureSpec expectMsg(List("a-1", "a-2", "c-1", "c-2")) // Create yet another one with same persistenceId, b-1 and b-2 discarded during replay - EventFilter - .warning(start = "Invalid replayed event", occurrences = 2) intercept { + EventFilter.warning(start = "Invalid replayed event", occurrences = 2) intercept { val p3 = namedPersistentActor[Behavior1PersistentActor] p3 ! GetState expectMsg(List("a-1", "a-2", "c-1", "c-2")) diff --git a/repos/akka/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala b/repos/akka/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala index 08c0231d76c..914b2d75e49 100644 --- a/repos/akka/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala +++ b/repos/akka/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala @@ -92,8 +92,7 @@ object SnapshotFailureRobustnessSpec { override def deleteAsync( persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Unit] = { - super - .deleteAsync(persistenceId, criteria) // we actually delete it properly, but act as if it failed + super.deleteAsync(persistenceId, criteria) // we actually delete it properly, but act as if it failed Future.failed( new IOException("Failed to delete snapshot for some reason!")) } diff --git a/repos/akka/akka-persistence/src/test/scala/akka/persistence/journal/ReplayFilterSpec.scala b/repos/akka/akka-persistence/src/test/scala/akka/persistence/journal/ReplayFilterSpec.scala index 7c3729b6947..cd935133c98 100644 --- a/repos/akka/akka-persistence/src/test/scala/akka/persistence/journal/ReplayFilterSpec.scala +++ b/repos/akka/akka-persistence/src/test/scala/akka/persistence/journal/ReplayFilterSpec.scala @@ -75,8 +75,7 @@ class ReplayFilterSpec extends AkkaSpec with ImplicitSender { windowSize = 100, maxOldWriters = 10, debugEnabled = false)) - EventFilter - .warning(start = "Invalid replayed event", occurrences = 1) intercept { + EventFilter.warning(start = "Invalid replayed event", occurrences = 1) intercept { filter ! m1 filter ! m2 filter ! 
m3 @@ -99,8 +98,7 @@ class ReplayFilterSpec extends AkkaSpec with ImplicitSender { windowSize = 100, maxOldWriters = 10, debugEnabled = false)) - EventFilter - .warning(start = "Invalid replayed event", occurrences = 2) intercept { + EventFilter.warning(start = "Invalid replayed event", occurrences = 2) intercept { filter ! m1 filter ! m2 val m3b = @@ -125,8 +123,7 @@ class ReplayFilterSpec extends AkkaSpec with ImplicitSender { windowSize = 100, maxOldWriters = 10, debugEnabled = false)) - EventFilter - .warning(start = "Invalid replayed event", occurrences = 3) intercept { + EventFilter.warning(start = "Invalid replayed event", occurrences = 3) intercept { filter ! m1 val m2b = m2.copy(persistent = m2.persistent.update(writerUuid = writerB)) @@ -162,8 +159,7 @@ class ReplayFilterSpec extends AkkaSpec with ImplicitSender { windowSize = 100, maxOldWriters = 10, debugEnabled = false)) - EventFilter - .error(start = "Invalid replayed event", occurrences = 1) intercept { + EventFilter.error(start = "Invalid replayed event", occurrences = 1) intercept { filter ! m1 filter ! m2 filter ! m3 @@ -184,8 +180,7 @@ class ReplayFilterSpec extends AkkaSpec with ImplicitSender { windowSize = 100, maxOldWriters = 10, debugEnabled = false)) - EventFilter - .error(start = "Invalid replayed event", occurrences = 1) intercept { + EventFilter.error(start = "Invalid replayed event", occurrences = 1) intercept { filter ! m1 filter ! m2 val m3b = @@ -209,8 +204,7 @@ class ReplayFilterSpec extends AkkaSpec with ImplicitSender { windowSize = 100, maxOldWriters = 10, debugEnabled = false)) - EventFilter - .warning(start = "Invalid replayed event", occurrences = 1) intercept { + EventFilter.warning(start = "Invalid replayed event", occurrences = 1) intercept { filter ! m1 filter ! m2 filter ! m3 @@ -234,8 +228,7 @@ class ReplayFilterSpec extends AkkaSpec with ImplicitSender { windowSize = 100, maxOldWriters = 10, debugEnabled = false)) - EventFilter - .warning(start = "Invalid replayed event", occurrences = 2) intercept { + EventFilter.warning(start = "Invalid replayed event", occurrences = 2) intercept { filter ! m1 filter ! m2 val m3b = @@ -261,8 +254,7 @@ class ReplayFilterSpec extends AkkaSpec with ImplicitSender { windowSize = 100, maxOldWriters = 10, debugEnabled = false)) - EventFilter - .warning(start = "Invalid replayed event", occurrences = 3) intercept { + EventFilter.warning(start = "Invalid replayed event", occurrences = 3) intercept { filter ! 
m1 val m2b = m2.copy(persistent = m2.persistent.update(writerUuid = writerB)) diff --git a/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/PiercingShouldKeepQuarantineSpec.scala b/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/PiercingShouldKeepQuarantineSpec.scala index dafaeffc06f..e702a3ffcb2 100644 --- a/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/PiercingShouldKeepQuarantineSpec.scala +++ b/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/PiercingShouldKeepQuarantineSpec.scala @@ -52,8 +52,8 @@ abstract class PiercingShouldKeepQuarantineSpec enterBarrier("actor-identified") // Manually Quarantine the other system - RARP(system).provider.transport - .quarantine(node(second).address, Some(uid)) + RARP(system).provider.transport.quarantine(node(second).address, + Some(uid)) // Quarantining is not immediate Thread.sleep(1000) diff --git a/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteQuarantinePiercingSpec.scala b/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteQuarantinePiercingSpec.scala index e690866b49f..3f467491ed3 100644 --- a/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteQuarantinePiercingSpec.scala +++ b/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteQuarantinePiercingSpec.scala @@ -72,8 +72,8 @@ abstract class RemoteQuarantinePiercingSpec enterBarrier("actor-identified") // Manually Quarantine the other system - RARP(system).provider.transport - .quarantine(node(second).address, Some(uidFirst)) + RARP(system).provider.transport.quarantine(node(second).address, + Some(uidFirst)) // Quarantine is up -- Cannot communicate with remote system any more system.actorSelection( diff --git a/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteRestartedQuarantinedSpec.scala b/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteRestartedQuarantinedSpec.scala index 65b99cc3473..8bb325b4d22 100644 --- a/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteRestartedQuarantinedSpec.scala +++ b/repos/akka/akka-remote-tests/src/multi-jvm/scala/akka/remote/RemoteRestartedQuarantinedSpec.scala @@ -88,8 +88,8 @@ abstract class RemoteRestartedQuarantinedSpec val (uid, ref) = identifyWithUid(second, "subject") - RARP(system).provider.transport - .quarantine(node(second).address, Some(uid)) + RARP(system).provider.transport.quarantine(node(second).address, + Some(uid)) enterBarrier("quarantined") enterBarrier("still-quarantined") diff --git a/repos/akka/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala b/repos/akka/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala index a631e14af6c..599afdf1efa 100644 --- a/repos/akka/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala +++ b/repos/akka/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala @@ -304,8 +304,7 @@ class BarrierSpec extends AkkaSpec(BarrierSpec.config) with ImplicitSender { expectMsg(ToClient(Done)) b ! Remove(B) b ! Remove(A) - EventFilter - .warning(start = "cannot remove", occurrences = 1) intercept { + EventFilter.warning(start = "cannot remove", occurrences = 1) intercept { b ! 
Remove(A) } Thread.sleep(5000) diff --git a/repos/akka/akka-remote/src/main/scala/akka/remote/Endpoint.scala b/repos/akka/akka-remote/src/main/scala/akka/remote/Endpoint.scala index 093e8a3f73d..dbaeaffd960 100644 --- a/repos/akka/akka-remote/src/main/scala/akka/remote/Endpoint.scala +++ b/repos/akka/akka-remote/src/main/scala/akka/remote/Endpoint.scala @@ -684,8 +684,10 @@ private[remote] class EndpointWriter( val ackIdleTimer = { val interval = settings.SysMsgAckTimeout / 2 - context.system.scheduler - .schedule(interval, interval, self, AckIdleCheckTimer) + context.system.scheduler.schedule(interval, + interval, + self, + AckIdleCheckTimer) } override def preStart(): Unit = { @@ -726,8 +728,9 @@ private[remote] class EndpointWriter( Logging.DebugLevel) case Handle(inboundHandle) ⇒ // Assert handle == None? - context.parent ! ReliableDeliverySupervisor - .GotUid(inboundHandle.handshakeInfo.uid, remoteAddress) + context.parent ! ReliableDeliverySupervisor.GotUid( + inboundHandle.handshakeInfo.uid, + remoteAddress) handle = Some(inboundHandle) reader = startReadEndpoint(inboundHandle) eventPublisher.notifyListeners( diff --git a/repos/akka/akka-remote/src/main/scala/akka/remote/RemoteWatcher.scala b/repos/akka/akka-remote/src/main/scala/akka/remote/RemoteWatcher.scala index c29c3b615be..af159ca7573 100644 --- a/repos/akka/akka-remote/src/main/scala/akka/remote/RemoteWatcher.scala +++ b/repos/akka/akka-remote/src/main/scala/akka/remote/RemoteWatcher.scala @@ -125,8 +125,10 @@ private[akka] class RemoteWatcher( var unreachable: Set[Address] = Set.empty var addressUids: Map[Address, Int] = Map.empty - val heartbeatTask = scheduler - .schedule(heartbeatInterval, heartbeatInterval, self, HeartbeatTick) + val heartbeatTask = scheduler.schedule(heartbeatInterval, + heartbeatInterval, + self, + HeartbeatTick) val failureDetectorReaperTask = scheduler.schedule(unreachableReaperInterval, unreachableReaperInterval, self, diff --git a/repos/akka/akka-remote/src/main/scala/akka/remote/Remoting.scala b/repos/akka/akka-remote/src/main/scala/akka/remote/Remoting.scala index 2f7479383a6..cc1a7c67980 100644 --- a/repos/akka/akka-remote/src/main/scala/akka/remote/Remoting.scala +++ b/repos/akka/akka-remote/src/main/scala/akka/remote/Remoting.scala @@ -519,8 +519,11 @@ private[remote] class EndpointManager(conf: Config, log: LoggingAdapter) val pruneInterval: FiniteDuration = (settings.RetryGateClosedFor * 2).max(1.second).min(10.seconds) - val pruneTimerCancellable: Cancellable = context.system.scheduler - .schedule(pruneInterval, pruneInterval, self, Prune) + val pruneTimerCancellable: Cancellable = context.system.scheduler.schedule( + pruneInterval, + pruneInterval, + self, + Prune) var pendingReadHandoffs = Map[ActorRef, AkkaProtocolHandle]() var stashedInbound = Map[ActorRef, Vector[InboundAssociation]]() @@ -678,8 +681,8 @@ private[remote] class EndpointManager(conf: Config, log: LoggingAdapter) "address cannot be quarantined without knowing the UID, gating instead for {} ms.", address, settings.RetryGateClosedFor.toMillis) - endpoints - .markAsFailed(endpoint, Deadline.now + settings.RetryGateClosedFor) + endpoints.markAsFailed(endpoint, + Deadline.now + settings.RetryGateClosedFor) case (Some(Pass(endpoint, Some(currentUid), _)), Some(quarantineUid)) if currentUid == quarantineUid ⇒ context.stop(endpoint) diff --git a/repos/akka/akka-remote/src/main/scala/akka/remote/RemotingLifecycleEvent.scala b/repos/akka/akka-remote/src/main/scala/akka/remote/RemotingLifecycleEvent.scala index 
eb83d7e2b89..250302cb7a5 100644 --- a/repos/akka/akka-remote/src/main/scala/akka/remote/RemotingLifecycleEvent.scala +++ b/repos/akka/akka-remote/src/main/scala/akka/remote/RemotingLifecycleEvent.scala @@ -66,8 +66,9 @@ final case class RemotingListenEvent(listenAddresses: Set[Address]) .asJava override def logLevel: Logging.LogLevel = Logging.InfoLevel override def toString: String = - "Remoting now listens on addresses: " + listenAddresses - .mkString("[", ", ", "]") + "Remoting now listens on addresses: " + listenAddresses.mkString("[", + ", ", + "]") } @SerialVersionUID(1L) diff --git a/repos/akka/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala b/repos/akka/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala index 368bf6e7847..eed7d26921a 100644 --- a/repos/akka/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala +++ b/repos/akka/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala @@ -516,8 +516,8 @@ class NettyTransport(val settings: NettyTransportSettings, private def outboundBootstrap(remoteAddress: Address): ClientBootstrap = { val bootstrap = setupBootstrap(new ClientBootstrap(clientChannelFactory), clientPipelineFactory(remoteAddress)) - bootstrap - .setOption("connectTimeoutMillis", settings.ConnectionTimeout.toMillis) + bootstrap.setOption("connectTimeoutMillis", + settings.ConnectionTimeout.toMillis) bootstrap.setOption("tcpNoDelay", settings.TcpNodelay) bootstrap.setOption("keepAlive", settings.TcpKeepalive) settings.ReceiveBufferSize.foreach(sz ⇒ diff --git a/repos/akka/akka-remote/src/test/scala/akka/remote/ActorsLeakSpec.scala b/repos/akka/akka-remote/src/test/scala/akka/remote/ActorsLeakSpec.scala index d009ec7c598..64e2fb4cc00 100644 --- a/repos/akka/akka-remote/src/test/scala/akka/remote/ActorsLeakSpec.scala +++ b/repos/akka/akka-remote/src/test/scala/akka/remote/ActorsLeakSpec.scala @@ -40,8 +40,9 @@ object ActorsLeakSpec { val cell = wc.underlying cell.childrenRefs match { - case ChildrenContainer - .TerminatingChildrenContainer(_, toDie, reason) ⇒ + case ChildrenContainer.TerminatingChildrenContainer(_, + toDie, + reason) ⇒ Nil case x @ (ChildrenContainer.TerminatedChildrenContainer | ChildrenContainer.EmptyChildrenContainer) ⇒ diff --git a/repos/akka/akka-remote/src/test/scala/akka/remote/transport/AkkaProtocolStressTest.scala b/repos/akka/akka-remote/src/test/scala/akka/remote/transport/AkkaProtocolStressTest.scala index e39bb5aa8e1..f9033a53493 100644 --- a/repos/akka/akka-remote/src/test/scala/akka/remote/transport/AkkaProtocolStressTest.scala +++ b/repos/akka/akka-remote/src/test/scala/akka/remote/transport/AkkaProtocolStressTest.scala @@ -71,8 +71,10 @@ object AkkaProtocolStressTest { // the proper ordering. if (seq > limit * 0.5) { controller ! 
((maxSeq, losses)) - context.system.scheduler - .schedule(1.second, 1.second, self, ResendFinal) + context.system.scheduler.schedule(1.second, + 1.second, + self, + ResendFinal) context.become(done) } } else { diff --git a/repos/akka/akka-remote/src/test/scala/akka/remote/transport/netty/NettyTransportSpec.scala b/repos/akka/akka-remote/src/test/scala/akka/remote/transport/netty/NettyTransportSpec.scala index 081740edf1c..5da2322eee1 100644 --- a/repos/akka/akka-remote/src/test/scala/akka/remote/transport/netty/NettyTransportSpec.scala +++ b/repos/akka/akka-remote/src/test/scala/akka/remote/transport/netty/NettyTransportSpec.scala @@ -164,15 +164,16 @@ trait BindBehaviour { this: WordSpec with Matchers ⇒ s"bind to specified $proto address" in { val address = SocketUtil.temporaryServerAddress(address = "127.0.0.1", udp = proto == "udp") - val bindAddress = try SocketUtil - .temporaryServerAddress(address = "127.0.1.1", udp = proto == "udp") - catch { - case e: java.net.BindException ⇒ - info( - s"skipping test due to [${e.getMessage}], you probably have to use `ifconfig lo0 alias 127.0.1.1`") - pending - null - } + val bindAddress = + try SocketUtil.temporaryServerAddress(address = "127.0.1.1", + udp = proto == "udp") + catch { + case e: java.net.BindException ⇒ + info( + s"skipping test due to [${e.getMessage}], you probably have to use `ifconfig lo0 alias 127.0.1.1`") + pending + null + } val bindConfig = ConfigFactory.parseString(s""" akka.remote { diff --git a/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/simple/SimpleClusterListener.scala b/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/simple/SimpleClusterListener.scala index 6867af8e074..b19e8c3a07f 100644 --- a/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/simple/SimpleClusterListener.scala +++ b/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/simple/SimpleClusterListener.scala @@ -26,8 +26,9 @@ class SimpleClusterListener extends Actor with ActorLogging { case UnreachableMember(member) => log.info("Member detected as unreachable: {}", member) case MemberRemoved(member, previousStatus) => - log - .info("Member is Removed: {} after {}", member.address, previousStatus) + log.info("Member is Removed: {} after {}", + member.address, + previousStatus) case _: MemberEvent => // ignore } } diff --git a/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/simple/SimpleClusterListener2.scala b/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/simple/SimpleClusterListener2.scala index 9c11ff6f084..cd252cff09d 100644 --- a/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/simple/SimpleClusterListener2.scala +++ b/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/simple/SimpleClusterListener2.scala @@ -25,8 +25,9 @@ class SimpleClusterListener2 extends Actor with ActorLogging { case UnreachableMember(member) => log.info("Member detected as unreachable: {}", member) case MemberRemoved(member, previousStatus) => - log - .info("Member is Removed: {} after {}", member.address, previousStatus) + log.info("Member is Removed: {} after {}", + member.address, + previousStatus) case _: MemberEvent => // ignore } } diff --git a/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/stats/StatsSampleOneMaster.scala 
b/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/stats/StatsSampleOneMaster.scala index f07b1a3433e..ac9da561257 100644 --- a/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/stats/StatsSampleOneMaster.scala +++ b/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/stats/StatsSampleOneMaster.scala @@ -41,12 +41,11 @@ object StatsSampleOneMaster { //#create-singleton-manager //#singleton-proxy - system.actorOf( - ClusterSingletonProxy - .props(singletonManagerPath = "/user/statsService", - settings = - ClusterSingletonProxySettings(system).withRole("compute")), - name = "statsServiceProxy") + system.actorOf(ClusterSingletonProxy.props( + singletonManagerPath = "/user/statsService", + settings = ClusterSingletonProxySettings(system) + .withRole("compute")), + name = "statsServiceProxy") //#singleton-proxy } } diff --git a/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/stats/StatsService.scala b/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/stats/StatsService.scala index 59263498f98..12f6d01c46d 100644 --- a/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/stats/StatsService.scala +++ b/repos/akka/akka-samples/akka-sample-cluster-scala/src/main/scala/sample/cluster/stats/StatsService.scala @@ -13,8 +13,8 @@ class StatsService extends Actor { // This router is used both with lookup and deploy of routees. If you // have a router with only lookup of routees you can use Props.empty // instead of Props[StatsWorker.class]. - val workerRouter = context - .actorOf(FromConfig.props(Props[StatsWorker]), name = "workerRouter") + val workerRouter = context.actorOf(FromConfig.props(Props[StatsWorker]), + name = "workerRouter") def receive = { case StatsJob(text) if text != "" => diff --git a/repos/akka/akka-samples/akka-sample-distributed-data-scala/src/main/scala/sample/distributeddata/ReplicatedMetrics.scala b/repos/akka/akka-samples/akka-sample-distributed-data-scala/src/main/scala/sample/distributeddata/ReplicatedMetrics.scala index 189ee74a524..bd1044e8511 100644 --- a/repos/akka/akka-samples/akka-sample-distributed-data-scala/src/main/scala/sample/distributeddata/ReplicatedMetrics.scala +++ b/repos/akka/akka-samples/akka-sample-distributed-data-scala/src/main/scala/sample/distributeddata/ReplicatedMetrics.scala @@ -53,11 +53,15 @@ class ReplicatedMetrics(measureInterval: FiniteDuration, implicit val cluster = Cluster(context.system) val node = nodeKey(cluster.selfAddress) - val tickTask = context.system.scheduler - .schedule(measureInterval, measureInterval, self, Tick)(context.dispatcher) - val cleanupTask = context.system.scheduler - .schedule(cleanupInterval, cleanupInterval, self, Cleanup)( - context.dispatcher) + val tickTask = context.system.scheduler.schedule(measureInterval, + measureInterval, + self, + Tick)(context.dispatcher) + val cleanupTask = + context.system.scheduler.schedule(cleanupInterval, + cleanupInterval, + self, + Cleanup)(context.dispatcher) val memoryMBean: MemoryMXBean = ManagementFactory.getMemoryMXBean val UsedHeapKey = LWWMapKey[Long]("usedHeap") diff --git a/repos/akka/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/SnapshotExample.scala b/repos/akka/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/SnapshotExample.scala index 4d6620d3327..b7006426f85 100644 --- 
a/repos/akka/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/SnapshotExample.scala +++ b/repos/akka/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/SnapshotExample.scala @@ -35,8 +35,8 @@ object SnapshotExample extends App { } val system = ActorSystem("example") - val persistentActor = system - .actorOf(Props(classOf[ExamplePersistentActor]), "persistentActor-3-scala") + val persistentActor = system.actorOf(Props(classOf[ExamplePersistentActor]), + "persistentActor-3-scala") persistentActor ! "a" persistentActor ! "b" diff --git a/repos/akka/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/ViewExample.scala b/repos/akka/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/ViewExample.scala index 6fe20b8991a..5125b89f44d 100644 --- a/repos/akka/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/ViewExample.scala +++ b/repos/akka/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/ViewExample.scala @@ -59,7 +59,9 @@ object ViewExample extends App { import system.dispatcher - system.scheduler - .schedule(Duration.Zero, 2.seconds, persistentActor, "scheduled") + system.scheduler.schedule(Duration.Zero, + 2.seconds, + persistentActor, + "scheduled") system.scheduler.schedule(Duration.Zero, 5.seconds, view, "snap") } diff --git a/repos/akka/akka-stream-testkit/src/test/scala/akka/stream/testkit/Coroner.scala b/repos/akka/akka-stream-testkit/src/test/scala/akka/stream/testkit/Coroner.scala index 98c33400830..38bbd3d7242 100644 --- a/repos/akka/akka-stream-testkit/src/test/scala/akka/stream/testkit/Coroner.scala +++ b/repos/akka/akka-stream-testkit/src/test/scala/akka/stream/testkit/Coroner.scala @@ -46,8 +46,8 @@ object Coroner { val finishedLatch = new CountDownLatch(1) def waitForStart(): Unit = { - startedLatch - .await(startAndStopDuration.length, startAndStopDuration.unit) + startedLatch.await(startAndStopDuration.length, + startAndStopDuration.unit) } def started(): Unit = startedLatch.countDown() @@ -58,8 +58,8 @@ object Coroner { override def cancel(): Unit = { cancelPromise.trySuccess(true) - finishedLatch - .await(startAndStopDuration.length, startAndStopDuration.unit) + finishedLatch.await(startAndStopDuration.length, + startAndStopDuration.unit) } override def ready(atMost: Duration)( diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSinkSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSinkSpec.scala index a5bb4552cee..a6802a437c8 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSinkSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/InputStreamSinkSpec.scala @@ -92,8 +92,8 @@ class InputStreamSinkSpec extends AkkaSpec(UnboundedMailboxConfig) { .run() val f = Future(inputStream.read(new Array[Byte](byteString.size))) - the[Exception] thrownBy Await - .result(f, timeout) shouldBe a[TimeoutException] + the[Exception] thrownBy Await.result(f, timeout) shouldBe a[ + TimeoutException] probe.sendNext(byteString) Await.result(f, timeout) should ===(byteString.size) diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSourceSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSourceSpec.scala index 211018461bb..8ca33d3aadf 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSourceSpec.scala +++ 
b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/OutputStreamSourceSpec.scala @@ -37,8 +37,8 @@ class OutputStreamSourceSpec extends AkkaSpec(UnboundedMailboxConfig) { val byteString = ByteString(bytesArray) def expectTimeout[T](f: Future[T], timeout: Duration) = - the[Exception] thrownBy Await - .result(f, timeout) shouldBe a[TimeoutException] + the[Exception] thrownBy Await.result(f, timeout) shouldBe a[ + TimeoutException] def expectSuccess[T](f: Future[T], value: T) = Await.result(f, timeout) should be(value) diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/TcpHelper.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/TcpHelper.scala index c1522087f20..ee67dff055c 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/TcpHelper.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/TcpHelper.scala @@ -41,8 +41,9 @@ object TcpHelper { .withDispatcher("akka.test.stream-dispatcher") class TestClient(connection: ActorRef) extends Actor { - connection ! Tcp - .Register(self, keepOpenOnPeerClosed = true, useResumeWriting = false) + connection ! Tcp.Register(self, + keepOpenOnPeerClosed = true, + useResumeWriting = false) var queuedWrites = Queue.empty[ByteString] var writePending = false diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/TlsSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/TlsSpec.scala index 82236aaba7c..be736c99141 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/TlsSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/io/TlsSpec.scala @@ -32,12 +32,12 @@ object TlsSpec { val password = "changeme" val keyStore = KeyStore.getInstance(KeyStore.getDefaultType) - keyStore - .load(getClass.getResourceAsStream("/keystore"), password.toCharArray) + keyStore.load(getClass.getResourceAsStream("/keystore"), + password.toCharArray) val trustStore = KeyStore.getInstance(KeyStore.getDefaultType) - trustStore - .load(getClass.getResourceAsStream(trustPath), password.toCharArray) + trustStore.load(getClass.getResourceAsStream(trustPath), + password.toCharArray) val keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm) diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AttributesSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AttributesSpec.scala index 3406e34faef..5e9ffdec409 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AttributesSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/AttributesSpec.scala @@ -72,8 +72,9 @@ class AttributesSpec extends AkkaSpec { } val attributes = - Attributes.name("a") and Attributes.name("b") and Attributes - .inputBuffer(1, 2) + Attributes.name("a") and Attributes.name("b") and Attributes.inputBuffer( + 1, + 2) "give access to first attribute" in { attributes.getFirst[Name] should ===(Some(Attributes.Name("a"))) diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFilterSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFilterSpec.scala index 80185898cf5..f0b6f50e6c3 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFilterSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFilterSpec.scala @@ -27,8 +27,9 @@ class FlowFilterSpec extends AkkaSpec with ScriptedTest { } "not blow up with high request counts" in { - val 
settings = ActorMaterializerSettings(system) - .withInputBuffer(initialSize = 1, maxSize = 1) + val settings = + ActorMaterializerSettings(system).withInputBuffer(initialSize = 1, + maxSize = 1) implicit val materializer = ActorMaterializer(settings) val probe = TestSubscriber.manualProbe[Int]() diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFoldSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFoldSpec.scala index 4a9a6236f64..3c3b824c73e 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFoldSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFoldSpec.scala @@ -44,14 +44,12 @@ class FlowFoldSpec extends AkkaSpec { } "work when using Flow.fold" in assertAllStagesStopped { - Await - .result(inputSource via foldFlow runWith Sink.head, 3.seconds) should be( + Await.result(inputSource via foldFlow runWith Sink.head, 3.seconds) should be( expected) } "work when using Source.fold + Flow.fold + Sink.fold" in assertAllStagesStopped { - Await - .result(foldSource via foldFlow runWith foldSink, 3.seconds) should be( + Await.result(foldSource via foldFlow runWith foldSink, 3.seconds) should be( expected) } diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowReduceSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowReduceSpec.scala index d64bfed1627..bbddb7f3f4c 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowReduceSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowReduceSpec.scala @@ -43,14 +43,12 @@ class FlowReduceSpec extends AkkaSpec { } "work when using Flow.reduce" in assertAllStagesStopped { - Await - .result(inputSource via reduceFlow runWith Sink.head, 3.seconds) should be( + Await.result(inputSource via reduceFlow runWith Sink.head, 3.seconds) should be( expected) } "work when using Source.reduce + Flow.reduce + Sink.reduce" in assertAllStagesStopped { - Await - .result(reduceSource via reduceFlow runWith reduceSink, 3.seconds) should be( + Await.result(reduceSource via reduceFlow runWith reduceSink, 3.seconds) should be( expected) } diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSpec.scala index 2bb9d07307c..f365765dc73 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSpec.scala @@ -379,8 +379,7 @@ class FlowSpec val downstream2Subscription = downstream2.expectSubscription() downstreamSubscription.request(5) - upstream - .expectRequest(upstreamSubscription, 1) // because initialInputBufferSize=1 + upstream.expectRequest(upstreamSubscription, 1) // because initialInputBufferSize=1 upstreamSubscription.sendNext("firstElement") downstream.expectNext("firstElement") @@ -409,8 +408,7 @@ class FlowSpec downstreamSubscription.request(5) - upstream - .expectRequest(upstreamSubscription, 1) // because initialInputBufferSize=1 + upstream.expectRequest(upstreamSubscription, 1) // because initialInputBufferSize=1 upstreamSubscription.sendNext("element1") downstream.expectNext("element1") upstreamSubscription.expectRequest(1) @@ -476,8 +474,7 @@ class FlowSpec // d2 now has 0 outstanding // buffer should be empty so we should be requesting one new element - upstream - .expectRequest(upstreamSubscription, 1) // 
because of buffer size 1 + upstream.expectRequest(upstreamSubscription, 1) // because of buffer size 1 } } diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala index ac082669369..043d1929464 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala @@ -152,8 +152,9 @@ class GraphUnzipWithSpec extends AkkaSpec { } "work in the sad case" in { - val settings = ActorMaterializerSettings(system) - .withInputBuffer(initialSize = 1, maxSize = 1) + val settings = + ActorMaterializerSettings(system).withInputBuffer(initialSize = 1, + maxSize = 1) val leftProbe = TestSubscriber.manualProbe[LeftOutput]() val rightProbe = TestSubscriber.manualProbe[RightOutput]() diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/HeadSinkSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/HeadSinkSpec.scala index a0330cfaae6..185a77b0482 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/HeadSinkSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/HeadSinkSpec.scala @@ -81,8 +81,7 @@ class HeadSinkSpec extends AkkaSpec with ScriptedTest { } "yield None for empty stream" in assertAllStagesStopped { - Await - .result(Source.empty[Int].runWith(Sink.headOption), 1.second) should be( + Await.result(Source.empty[Int].runWith(Sink.headOption), 1.second) should be( None) } } diff --git a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/LastSinkSpec.scala b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/LastSinkSpec.scala index 9c56f0a9ec6..ae2ea7009fd 100644 --- a/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/LastSinkSpec.scala +++ b/repos/akka/akka-stream-tests/src/test/scala/akka/stream/scaladsl/LastSinkSpec.scala @@ -21,8 +21,7 @@ class LastSinkSpec extends AkkaSpec with ScriptedTest { "A Flow with Sink.last" must { "yield the last value" in assertAllStagesStopped { - Await - .result(Source(1 to 42).map(identity).runWith(Sink.last), 1.second) should be( + Await.result(Source(1 to 42).map(identity).runWith(Sink.last), 1.second) should be( 42) } @@ -54,8 +53,7 @@ class LastSinkSpec extends AkkaSpec with ScriptedTest { } "yield None for empty stream" in assertAllStagesStopped { - Await - .result(Source.empty[Int].runWith(Sink.lastOption), 1.second) should be( + Await.result(Source.empty[Int].runWith(Sink.lastOption), 1.second) should be( None) } } diff --git a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala index b1913d668fb..fe985d8fd84 100644 --- a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala +++ b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/Sinks.scala @@ -215,8 +215,9 @@ private[akka] final class ActorRefSink[In](ref: ActorRef, actorMaterializer.effectiveSettings(context.effectiveAttributes) val subscriberRef = actorMaterializer.actorOf( context, - ActorRefSinkActor - .props(ref, effectiveSettings.maxInputBufferSize, onCompleteMessage)) + ActorRefSinkActor.props(ref, + effectiveSettings.maxInputBufferSize, + onCompleteMessage)) (akka.stream.actor.ActorSubscriber[In](subscriberRef), NotUsed) } diff --git 
a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala index a0c1a7831b8..ac73d07f441 100644 --- a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala +++ b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala @@ -1007,8 +1007,9 @@ private[stream] abstract class MaterializerSession( materializeAtomic(atomic, subEffectiveAttributes, materializedValues) case copied: CopiedModule ⇒ enterScope(copied) - materializedValues - .put(copied, materializeModule(copied, subEffectiveAttributes)) + materializedValues.put( + copied, + materializeModule(copied, subEffectiveAttributes)) exitScope(copied) case composite @ (_: CompositeModule | _: FusedModule) ⇒ materializedValues.put( diff --git a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphInterpreter.scala b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphInterpreter.scala index d751fed444b..a301942463e 100644 --- a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphInterpreter.scala +++ b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/fusing/GraphInterpreter.scala @@ -215,8 +215,9 @@ private[akka] object GraphInterpreter { "GraphAssembly\n " + stageList.mkString("[ ", "\n ", "\n ]") + "\n " + ins.mkString("[", ",", "]") + "\n " + inOwners.mkString("[", ",", "]") + "\n " + - outs.mkString("[", ",", "]") + "\n " + outOwners - .mkString("[", ",", "]") + outs.mkString("[", ",", "]") + "\n " + outOwners.mkString("[", + ",", + "]") } } diff --git a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala index 1c2530c49f0..bea57156571 100644 --- a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala +++ b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/io/IOSinks.scala @@ -37,8 +37,10 @@ private[akka] final class FileSink(f: File, val settings = materializer.effectiveSettings(context.effectiveAttributes) val ioResultPromise = Promise[IOResult]() - val props = FileSubscriber - .props(f, ioResultPromise, settings.maxInputBufferSize, options) + val props = FileSubscriber.props(f, + ioResultPromise, + settings.maxInputBufferSize, + options) val dispatcher = context.effectiveAttributes.get[Dispatcher](IODispatcher).dispatcher @@ -73,8 +75,10 @@ private[akka] final class OutputStreamSink(createOutput: () ⇒ OutputStream, val os = createOutput() // if it fails, we fail the materialization - val props = OutputStreamSubscriber - .props(os, ioResultPromise, settings.maxInputBufferSize, autoFlush) + val props = OutputStreamSubscriber.props(os, + ioResultPromise, + settings.maxInputBufferSize, + autoFlush) val ref = materializer.actorOf(context, props) (akka.stream.actor.ActorSubscriber[ByteString](ref), diff --git a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala index 4c8040dbf6b..b2d6abc925b 100644 --- a/repos/akka/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala +++ b/repos/akka/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala @@ -58,8 +58,11 @@ private[stream] class ConnectionSourceStage( override def preStart(): Unit = { getStageActor(receive) - tcpManager ! Tcp - .Bind(self, endpoint, backlog, options, pullMode = true) + tcpManager ! 
Tcp.Bind(self, + endpoint, + backlog, + options, + pullMode = true) } private def receive(evt: (ActorRef, Any)): Unit = { diff --git a/repos/akka/akka-stream/src/main/scala/akka/stream/javadsl/Graph.scala b/repos/akka/akka-stream/src/main/scala/akka/stream/javadsl/Graph.scala index ce0955112b9..a35fc7b2b47 100644 --- a/repos/akka/akka-stream/src/main/scala/akka/stream/javadsl/Graph.scala +++ b/repos/akka/akka-stream/src/main/scala/akka/stream/javadsl/Graph.scala @@ -176,8 +176,8 @@ object Partition { */ def create[T](outputCount: Int, partitioner: function.Function[T, Int]) : Graph[UniformFanOutShape[T, T], NotUsed] = - scaladsl - .Partition(outputCount, partitioner = (t: T) ⇒ partitioner.apply(t)) + scaladsl.Partition(outputCount, + partitioner = (t: T) ⇒ partitioner.apply(t)) /** * Create a new `Partition` stage with the specified input type. diff --git a/repos/akka/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala b/repos/akka/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala index b78959f30be..851c943a41d 100644 --- a/repos/akka/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala +++ b/repos/akka/akka-stream/src/main/scala/akka/stream/stage/GraphStage.scala @@ -1270,8 +1270,10 @@ abstract class TimerGraphStageLogic(_shape: Shape) interval: FiniteDuration): Unit = { cancelTimer(timerKey) val id = timerIdGen.next() - val task = interpreter.materializer - .schedulePeriodically(initialDelay, interval, new Runnable { + val task = interpreter.materializer.schedulePeriodically( + initialDelay, + interval, + new Runnable { def run() = getTimerAsyncCallback.invoke( Scheduled(timerKey, id, repeating = true)) diff --git a/repos/akka/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala b/repos/akka/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala index 1347be77b5f..8d8edae1217 100644 --- a/repos/akka/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala +++ b/repos/akka/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala @@ -23,8 +23,7 @@ class AkkaSpecSpec extends WordSpec with Matchers { implicit val system = ActorSystem("AkkaSpec0", AkkaSpec.testConf) try { val a = system.actorOf(Props.empty) - EventFilter - .warning(start = "unhandled message", occurrences = 1) intercept { + EventFilter.warning(start = "unhandled message", occurrences = 1) intercept { a ! 
42 } } finally { diff --git a/repos/akka/akka-testkit/src/test/scala/akka/testkit/Coroner.scala b/repos/akka/akka-testkit/src/test/scala/akka/testkit/Coroner.scala index 0fb8ebad055..dfe76930d63 100644 --- a/repos/akka/akka-testkit/src/test/scala/akka/testkit/Coroner.scala +++ b/repos/akka/akka-testkit/src/test/scala/akka/testkit/Coroner.scala @@ -44,8 +44,8 @@ object Coroner { val finishedLatch = new CountDownLatch(1) def waitForStart(): Unit = { - startedLatch - .await(startAndStopDuration.length, startAndStopDuration.unit) + startedLatch.await(startAndStopDuration.length, + startAndStopDuration.unit) } def started(): Unit = startedLatch.countDown() @@ -56,8 +56,8 @@ object Coroner { override def cancel(): Unit = { cancelPromise.trySuccess(true) - finishedLatch - .await(startAndStopDuration.length, startAndStopDuration.unit) + finishedLatch.await(startAndStopDuration.length, + startAndStopDuration.unit) } override def ready(atMost: Duration)( diff --git a/repos/akka/project/ActivatorDist.scala b/repos/akka/project/ActivatorDist.scala index c1b3effd6b7..acf897e57c8 100644 --- a/repos/akka/project/ActivatorDist.scala +++ b/repos/akka/project/ActivatorDist.scala @@ -45,8 +45,9 @@ object ActivatorDist { val filteredPathFinder = PathFinder(dir) descendantsExcept ("*", gitignoreFileFilter) filter (_.isFile) - filteredPathFinder pair Path - .rebase(dir, activatorDistDirectory / dir.name) map { + filteredPathFinder pair Path.rebase( + dir, + activatorDistDirectory / dir.name) map { case (source, target) => s.log.info(s"copying: $source -> $target") IO.copyFile(source, target, preserveLastModified = true) diff --git a/repos/breeze/benchmark/src/main/scala/breeze/linalg/DenseAxpyBenchmark.scala b/repos/breeze/benchmark/src/main/scala/breeze/linalg/DenseAxpyBenchmark.scala index 9f5fa3e3056..bb1bca00083 100644 --- a/repos/breeze/benchmark/src/main/scala/breeze/linalg/DenseAxpyBenchmark.scala +++ b/repos/breeze/benchmark/src/main/scala/breeze/linalg/DenseAxpyBenchmark.scala @@ -23,8 +23,12 @@ class DenseAxpyBenchmark extends BreezeBenchmark { def timeBlasSaxpy(reps: Int) = { cforRange(0 until reps) { _ => - BLAS.getInstance - .saxpy(fv.length, 0.042f, fv.data, fv.stride, fv2.data, fv2.stride) + BLAS.getInstance.saxpy(fv.length, + 0.042f, + fv.data, + fv.stride, + fv2.data, + fv2.stride) } } diff --git a/repos/breeze/benchmark/src/main/scala/breeze/linalg/DenseDotProductBenchmark.scala b/repos/breeze/benchmark/src/main/scala/breeze/linalg/DenseDotProductBenchmark.scala index d02acb83fa5..ceee355d8af 100644 --- a/repos/breeze/benchmark/src/main/scala/breeze/linalg/DenseDotProductBenchmark.scala +++ b/repos/breeze/benchmark/src/main/scala/breeze/linalg/DenseDotProductBenchmark.scala @@ -27,8 +27,11 @@ class DenseDotProductBenchmark extends BreezeBenchmark { def timeDirectBigDV(reps: Int) = { var sum = 0.0 cforRange(0 until reps) { rep => - sum += DenseVectorSupportMethods - .dotProduct_Double(dvBig.data, 0, dv2Big.data, 0, dvBig.length) + sum += DenseVectorSupportMethods.dotProduct_Double(dvBig.data, + 0, + dv2Big.data, + 0, + dvBig.length) } sum } @@ -74,8 +77,11 @@ class DenseDotProductBenchmark extends BreezeBenchmark { def timeDirectBigFV(reps: Int) = { var sum = 0.0 cforRange(0 until reps) { rep => - sum += DenseVectorSupportMethods - .dotProduct_Float(fvBig.data, 0, fv2Big.data, 0, fvBig.length) + sum += DenseVectorSupportMethods.dotProduct_Float(fvBig.data, + 0, + fv2Big.data, + 0, + fvBig.length) } sum } diff --git a/repos/breeze/math/src/main/scala/breeze/io/RandomAccessFile.scala 
b/repos/breeze/math/src/main/scala/breeze/io/RandomAccessFile.scala index 635c82a1f16..478634e3294 100644 --- a/repos/breeze/math/src/main/scala/breeze/io/RandomAccessFile.scala +++ b/repos/breeze/math/src/main/scala/breeze/io/RandomAccessFile.scala @@ -411,8 +411,10 @@ class RandomAccessFile(file: File, arg0: String = "r")( //the following is a hack to avoid the heavier Scala for loop var c = 0 while (c < n) { - tr(c) = converter - .bytesToInt32(ba(c * 4), ba(c * 4 + 1), ba(c * 4 + 2), ba(c * 4 + 3)) + tr(c) = converter.bytesToInt32(ba(c * 4), + ba(c * 4 + 1), + ba(c * 4 + 2), + ba(c * 4 + 3)) c += 1 } //for(c <- 0 until n) tr(c) = bytesToInt16(ba(c), ba(c + 1)) @@ -486,8 +488,10 @@ class RandomAccessFile(file: File, arg0: String = "r")( //the following is a hack to avoid the heavier Scala for loop var c = 0 while (c < n) { - tr(c) = converter - .bytesToUInt32(ba(c * 4), ba(c * 4 + 1), ba(c * 4 + 2), ba(c * 4 + 3)) + tr(c) = converter.bytesToUInt32(ba(c * 4), + ba(c * 4 + 1), + ba(c * 4 + 2), + ba(c * 4 + 3)) c += 1 } //for(c <- 0 until n) tr(c) = bytesToInt16(ba(c), ba(c + 1)) @@ -522,8 +526,14 @@ class RandomAccessFile(file: File, arg0: String = "r")( @throws(classOf[IOException]) def readInt64(): Long = { val ba = readByte(8) - converter - .bytesToInt64(ba(0), ba(1), ba(2), ba(3), ba(4), ba(5), ba(6), ba(7)) + converter.bytesToInt64(ba(0), + ba(1), + ba(2), + ba(3), + ba(4), + ba(5), + ba(6), + ba(7)) } /** Tries to read n Int64s from the current getFilePointer(). @@ -606,8 +616,14 @@ class RandomAccessFile(file: File, arg0: String = "r")( @throws(classOf[IOException]) final def readUInt64(): ULong = { val ba = readByte(8) - converter - .bytesToUInt64(ba(0), ba(1), ba(2), ba(3), ba(4), ba(5), ba(6), ba(7)) + converter.bytesToUInt64(ba(0), + ba(1), + ba(2), + ba(3), + ba(4), + ba(5), + ba(6), + ba(7)) } /** Tries to read n UInt64s from the current getFilePointer(). diff --git a/repos/breeze/math/src/main/scala/breeze/linalg/DenseMatrix.scala b/repos/breeze/math/src/main/scala/breeze/linalg/DenseMatrix.scala index ffbe3b62465..f4602d63cf5 100644 --- a/repos/breeze/math/src/main/scala/breeze/linalg/DenseMatrix.scala +++ b/repos/breeze/math/src/main/scala/breeze/linalg/DenseMatrix.scala @@ -235,9 +235,12 @@ final class DenseMatrix[@spec(Double, Int, Float, Long) V]( */ def reshape(rows: Int, cols: Int, view: View = View.Prefer): DenseMatrix[V] = { val _cols = cols //if(cols < 0) size / rows else cols - require(rows * _cols == size, - "Cannot reshape a (%d,%d) matrix to a (%d,%d) matrix!" 
- .format(this.rows, this.cols, rows, _cols)) + require( + rows * _cols == size, + "Cannot reshape a (%d,%d) matrix to a (%d,%d) matrix!".format(this.rows, + this.cols, + rows, + _cols)) view match { case View.Require => @@ -314,8 +317,8 @@ final class DenseMatrix[@spec(Double, Int, Float, Long) V]( if (row == 0) this(1 until rows, ::).copy else if (row == rows - 1) this(0 until rows - 1, ::).copy else - DenseMatrix - .vertcat(this(0 until row, ::), this((row + 1) until rows, ::)) + DenseMatrix.vertcat(this(0 until row, ::), + this((row + 1) until rows, ::)) } def delete(col: Int, axis: Axis._1.type): DenseMatrix[V] = { @@ -325,8 +328,8 @@ final class DenseMatrix[@spec(Double, Int, Float, Long) V]( if (col == 0) this(::, 1 until cols).copy else if (col == cols - 1) this(::, 0 until cols - 1).copy else - DenseMatrix - .horzcat(this(::, 0 until col), this(::, (col + 1) until cols)) + DenseMatrix.horzcat(this(::, 0 until col), + this(::, (col + 1) until cols)) } def delete(rows: Seq[Int], axis: Axis._0.type): DenseMatrix[V] = { diff --git a/repos/breeze/math/src/main/scala/breeze/linalg/DenseVector.scala b/repos/breeze/math/src/main/scala/breeze/linalg/DenseVector.scala index 4aae7f5c1c0..ce9e54c5d7e 100644 --- a/repos/breeze/math/src/main/scala/breeze/linalg/DenseVector.scala +++ b/repos/breeze/math/src/main/scala/breeze/linalg/DenseVector.scala @@ -731,8 +731,9 @@ object DenseVector require(a.length == b.length, s"Vectors must have same length") if (a.noOffsetOrStride && b.noOffsetOrStride && a.length < DenseVectorSupportMethods.MAX_SMALL_DOT_PRODUCT_LENGTH) { - DenseVectorSupportMethods - .smallDotProduct_Double(a.data, b.data, a.length) + DenseVectorSupportMethods.smallDotProduct_Double(a.data, + b.data, + a.length) } else { blasPath(a, b) } @@ -744,8 +745,11 @@ object DenseVector b: DenseVector[Double]): Double = { if ((a.length <= 300 || !usingNatives) && a.stride == 1 && b.stride == 1) { - DenseVectorSupportMethods - .dotProduct_Double(a.data, a.offset, b.data, b.offset, a.length) + DenseVectorSupportMethods.dotProduct_Double(a.data, + a.offset, + b.data, + b.offset, + a.length) } else { val boff = if (b.stride >= 0) b.offset diff --git a/repos/breeze/math/src/main/scala/breeze/linalg/SparseVector.scala b/repos/breeze/math/src/main/scala/breeze/linalg/SparseVector.scala index e1450c5fdf0..6ecb8d1777d 100644 --- a/repos/breeze/math/src/main/scala/breeze/linalg/SparseVector.scala +++ b/repos/breeze/math/src/main/scala/breeze/linalg/SparseVector.scala @@ -248,8 +248,11 @@ object SparseVector var off = 0 while (vec < vectors.length) { colPtrs(vec) = off - System - .arraycopy(vectors(vec).data, 0, data, off, vectors(vec).activeSize) + System.arraycopy(vectors(vec).data, + 0, + data, + off, + vectors(vec).activeSize) System.arraycopy(vectors(vec).index, 0, rowIndices, diff --git a/repos/breeze/math/src/main/scala/breeze/linalg/constructors/random.scala b/repos/breeze/math/src/main/scala/breeze/linalg/constructors/random.scala index c376c1089ed..d79dca99bcb 100644 --- a/repos/breeze/math/src/main/scala/breeze/linalg/constructors/random.scala +++ b/repos/breeze/math/src/main/scala/breeze/linalg/constructors/random.scala @@ -111,8 +111,9 @@ trait RandomGeneratorUFunc[T] extends UFunc { : Impl2[(Int, Int), (T, T), DenseMatrix[T]] = new Impl2[(Int, Int), (T, T), DenseMatrix[T]] { def apply(dimensions2: (Int, Int), range: (T, T)): DenseMatrix[T] = { - DenseMatrix - .rand(dimensions2._1, dimensions2._2, genRange(range._1, range._2)) + DenseMatrix.rand(dimensions2._1, + dimensions2._2, + 
genRange(range._1, range._2)) } } } diff --git a/repos/breeze/math/src/main/scala/breeze/linalg/operators/DenseVectorOps.scala b/repos/breeze/math/src/main/scala/breeze/linalg/operators/DenseVectorOps.scala index 52c1b828131..e3e9e1bb7eb 100644 --- a/repos/breeze/math/src/main/scala/breeze/linalg/operators/DenseVectorOps.scala +++ b/repos/breeze/math/src/main/scala/breeze/linalg/operators/DenseVectorOps.scala @@ -742,8 +742,9 @@ trait DenseVector_SpecialOps extends DenseVectorOps { this: DenseVector.type => require(a.length == b.length, s"Vectors must have same length") if (a.noOffsetOrStride && b.noOffsetOrStride && a.length < DenseVectorSupportMethods.MAX_SMALL_DOT_PRODUCT_LENGTH) { - DenseVectorSupportMethods - .smallDotProduct_Float(a.data, b.data, a.length) + DenseVectorSupportMethods.smallDotProduct_Float(a.data, + b.data, + a.length) } else { blasPath(a, b) } @@ -755,8 +756,11 @@ trait DenseVector_SpecialOps extends DenseVectorOps { this: DenseVector.type => b: DenseVector[Float]): Float = { if ((a.length <= 300 || !usingNatives) && a.stride == 1 && b.stride == 1) { - DenseVectorSupportMethods - .dotProduct_Float(a.data, a.offset, b.data, b.offset, a.length) + DenseVectorSupportMethods.dotProduct_Float(a.data, + a.offset, + b.data, + b.offset, + a.length) } else { val boff = if (b.stride >= 0) b.offset diff --git a/repos/breeze/math/src/main/scala/breeze/linalg/operators/SparseVectorOps.scala b/repos/breeze/math/src/main/scala/breeze/linalg/operators/SparseVectorOps.scala index baa04eecda9..e2e8e9d979f 100644 --- a/repos/breeze/math/src/main/scala/breeze/linalg/operators/SparseVectorOps.scala +++ b/repos/breeze/math/src/main/scala/breeze/linalg/operators/SparseVectorOps.scala @@ -663,8 +663,10 @@ trait SparseVectorOps { this: SparseVector.type => while (aoff < asize) { val aind: Int = a.indexAt(aoff) // the min reflects the invariant that index aind must be in the first aind active indices in b's index. 
- boff = util.Arrays - .binarySearch(b.index, boff, math.min(bsize, aind + 1), aind) + boff = util.Arrays.binarySearch(b.index, + boff, + math.min(bsize, aind + 1), + aind) if (boff < 0) { boff = ~boff if (boff == bsize) { @@ -673,8 +675,11 @@ trait SparseVectorOps { this: SparseVector.type => } else { // fast forward a until we get to the b we just got to val bind = b.indexAt(boff) - var newAoff = util.Arrays - .binarySearch(a.index, aoff, math.min(asize, bind + 1), bind) + var newAoff = util.Arrays.binarySearch( + a.index, + aoff, + math.min(asize, bind + 1), + bind) if (newAoff < 0) { newAoff = ~newAoff boff += 1 @@ -1098,8 +1103,10 @@ trait SparseVectorOps { this: SparseVector.type => // b moves to catch up with a, then a takes a step (possibly bringing b along) while (aoff < asize) { val aind: Int = a.indexAt(aoff) - boff = util.Arrays - .binarySearch(b.index, boff, math.min(bsize, aind + 1), aind) + boff = util.Arrays.binarySearch(b.index, + boff, + math.min(bsize, aind + 1), + aind) if (boff < 0) { boff = ~boff if (boff == bsize) { @@ -1108,8 +1115,11 @@ trait SparseVectorOps { this: SparseVector.type => } else { // fast forward a until we get to the b we just got to val bind: Int = b.indexAt(boff) - var newAoff: Int = util.Arrays - .binarySearch(a.index, aoff, math.min(asize, bind + 1), bind) + var newAoff: Int = util.Arrays.binarySearch( + a.index, + aoff, + math.min(asize, bind + 1), + bind) if (newAoff < 0) { newAoff = ~newAoff boff += 1 diff --git a/repos/breeze/math/src/main/scala/breeze/linalg/operators/VectorBuilderOps.scala b/repos/breeze/math/src/main/scala/breeze/linalg/operators/VectorBuilderOps.scala index 2c75dd6c0c0..3447b1b8ea8 100644 --- a/repos/breeze/math/src/main/scala/breeze/linalg/operators/VectorBuilderOps.scala +++ b/repos/breeze/math/src/main/scala/breeze/linalg/operators/VectorBuilderOps.scala @@ -154,8 +154,8 @@ trait VectorBuilderOps { this: VectorBuilder.type => dev: Zero[V], classTag: ClassTag[V]) : UImpl2[Op, VectorBuilder[V], Other, VectorBuilder[V]] = { - BinaryOp - .fromCopyAndUpdate[VectorBuilder[V], Other, Op](op, canCopyBuilder[V]) + BinaryOp.fromCopyAndUpdate[VectorBuilder[V], Other, Op](op, + canCopyBuilder[V]) } @expand diff --git a/repos/breeze/math/src/main/scala/breeze/numerics/financial/package.scala b/repos/breeze/math/src/main/scala/breeze/numerics/financial/package.scala index a6af5d09620..7d2604563a0 100644 --- a/repos/breeze/math/src/main/scala/breeze/numerics/financial/package.scala +++ b/repos/breeze/math/src/main/scala/breeze/numerics/financial/package.scala @@ -86,8 +86,7 @@ package object financial { val denominator = ((1.0 + rate * when.t) / rate) * (math.pow(1.0 + rate, numPeriods) - 1.0) - -1 * (futureValue + presentValue * math - .pow(1.0 + rate, numPeriods)) / denominator + -1 * (futureValue + presentValue * math.pow(1.0 + rate, numPeriods)) / denominator } } @@ -170,8 +169,8 @@ package object financial { //pading 0 to the end val fullRoots = if (0 < trailingZeros) { - DenseVector - .vertcat(complexRoots, DenseVector.zeros[Complex](trailingZeros)) + DenseVector.vertcat(complexRoots, + DenseVector.zeros[Complex](trailingZeros)) } else { complexRoots } diff --git a/repos/breeze/math/src/main/scala/breeze/optimize/ProjectedQuasiNewton.scala b/repos/breeze/math/src/main/scala/breeze/optimize/ProjectedQuasiNewton.scala index 9298405836c..0ae9238a04f 100644 --- a/repos/breeze/math/src/main/scala/breeze/optimize/ProjectedQuasiNewton.scala +++ b/repos/breeze/math/src/main/scala/breeze/optimize/ProjectedQuasiNewton.scala @@ -70,8 
+70,8 @@ class CompactHessian(M: DenseMatrix[Double], // M is the 2k x 2k matrix given by: M = [ \sigma * S_k^T S_k L_k ] // [ L_k^T -D_k ] - val M = DenseMatrix - .vertcat(DenseMatrix.horzcat(STS, L), DenseMatrix.horzcat(L.t, -D)) + val M = DenseMatrix.vertcat(DenseMatrix.horzcat(STS, L), + DenseMatrix.horzcat(L.t, -D)) val newB = new CompactHessian(M, Y, S, sigma, m) newB @@ -170,8 +170,9 @@ class ProjectedQuasiNewton( x, grad, history) - val spgResult = innerOptimizer - .minimizeAndReturnState(new CachedDiffFunction(subprob), x) + val spgResult = innerOptimizer.minimizeAndReturnState( + new CachedDiffFunction(subprob), + x) logger.info( f"ProjectedQuasiNewton: outerIter ${state.iter} innerIters ${spgResult.iter}") spgResult.x - x diff --git a/repos/breeze/math/src/main/scala/breeze/optimize/TruncatedNewtonMinimizer.scala b/repos/breeze/math/src/main/scala/breeze/optimize/TruncatedNewtonMinimizer.scala index a588d3d5ac1..5ea7d6cdae9 100644 --- a/repos/breeze/math/src/main/scala/breeze/optimize/TruncatedNewtonMinimizer.scala +++ b/repos/breeze/math/src/main/scala/breeze/optimize/TruncatedNewtonMinimizer.scala @@ -162,8 +162,10 @@ class TruncatedNewtonMinimizer[T, H](maxIterations: Int = -1, norm(residual), predictedReduction, actualReduction)) - state - .copy(this_iter, delta = newDelta, stop = stop_cond, accept = false) + state.copy(this_iter, + delta = newDelta, + stop = stop_cond, + accept = false) } } } diff --git a/repos/breeze/math/src/main/scala/breeze/optimize/proximal/NonlinearMinimizer.scala b/repos/breeze/math/src/main/scala/breeze/optimize/proximal/NonlinearMinimizer.scala index e4ed124888a..b11f16eb678 100644 --- a/repos/breeze/math/src/main/scala/breeze/optimize/proximal/NonlinearMinimizer.scala +++ b/repos/breeze/math/src/main/scala/breeze/optimize/proximal/NonlinearMinimizer.scala @@ -323,8 +323,7 @@ object NonlinearMinimizer { agg + abs(entry) } val sparseQpObj = - QuadraticMinimizer - .computeObjective(regularizedGram, q, sparseQpResult.x) + + QuadraticMinimizer.computeObjective(regularizedGram, q, sparseQpResult.x) + lambdaL1 * sparseQpL1Obj val quadraticCostWithL2 = QuadraticMinimizer.Cost(regularizedGram, q) @@ -339,8 +338,7 @@ object NonlinearMinimizer { agg + abs(entry) } val nlSparseObj = - QuadraticMinimizer - .computeObjective(regularizedGram, q, nlSparseResult.x) + + QuadraticMinimizer.computeObjective(regularizedGram, q, nlSparseResult.x) + lambdaL1 * nlSparseL1Obj init := 0.0 diff --git a/repos/breeze/math/src/main/scala/breeze/signal/fourierShift.scala b/repos/breeze/math/src/main/scala/breeze/signal/fourierShift.scala index 0a279f18d48..14280ec0cca 100644 --- a/repos/breeze/math/src/main/scala/breeze/signal/fourierShift.scala +++ b/repos/breeze/math/src/main/scala/breeze/signal/fourierShift.scala @@ -23,8 +23,8 @@ object fourierShift extends UFunc { new Impl[DenseVector[T], DenseVector[T]] { def apply(dft: DenseVector[T]): DenseVector[T] = { if (isEven(dft.length)) - DenseVector - .vertcat(dft(dft.length / 2 to -1), dft(0 to dft.length / 2 - 1)) + DenseVector.vertcat(dft(dft.length / 2 to -1), + dft(0 to dft.length / 2 - 1)) else DenseVector.vertcat(dft((dft.length + 1) / 2 to -1), dft(0 to (dft.length - 1) / 2)) diff --git a/repos/breeze/math/src/main/scala/breeze/signal/iFourierShift.scala b/repos/breeze/math/src/main/scala/breeze/signal/iFourierShift.scala index 184c660e78e..0ff6b4ef03e 100644 --- a/repos/breeze/math/src/main/scala/breeze/signal/iFourierShift.scala +++ b/repos/breeze/math/src/main/scala/breeze/signal/iFourierShift.scala @@ -27,8 +27,8 @@ 
object iFourierShift extends UFunc { new Impl[DenseVector[T], DenseVector[T]] { def apply(dft: DenseVector[T]): DenseVector[T] = { if (isEven(dft.length)) - DenseVector - .vertcat(dft(dft.length / 2 to -1), dft(0 to dft.length / 2 - 1)) + DenseVector.vertcat(dft(dft.length / 2 to -1), + dft(0 to dft.length / 2 - 1)) else DenseVector.vertcat(dft((dft.length - 1) / 2 to -1), dft(0 to (dft.length - 1) / 2 - 1)) diff --git a/repos/breeze/math/src/main/scala/breeze/stats/regression/LeastSquares.scala b/repos/breeze/math/src/main/scala/breeze/stats/regression/LeastSquares.scala index 8194c3ce44c..9a9bae8662c 100644 --- a/repos/breeze/math/src/main/scala/breeze/stats/regression/LeastSquares.scala +++ b/repos/breeze/math/src/main/scala/breeze/stats/regression/LeastSquares.scala @@ -57,8 +57,9 @@ object leastSquares extends UFunc { def apply(data: DenseMatrix[Double], outputs: DenseVector[Double], workArray: Array[Double]): LeastSquaresRegressionResult = - leastSquaresImplementation - .doLeastSquares(data.copy, outputs.copy, workArray) + leastSquaresImplementation.doLeastSquares(data.copy, + outputs.copy, + workArray) } implicit val matrixVectorSpecifiedWork: Impl3[DenseMatrix[Double], @@ -72,8 +73,9 @@ object leastSquares extends UFunc { def apply(data: DenseMatrix[Double], outputs: DenseVector[Double], workSize: Int): LeastSquaresRegressionResult = - leastSquaresImplementation - .doLeastSquares(data.copy, outputs.copy, new Array[Double](workSize)) + leastSquaresImplementation.doLeastSquares(data.copy, + outputs.copy, + new Array[Double](workSize)) } implicit val matrixVector: Impl2[DenseMatrix[Double], @@ -117,8 +119,9 @@ object leastSquaresDestructive extends UFunc { def apply(data: DenseMatrix[Double], outputs: DenseVector[Double], workSize: Int): LeastSquaresRegressionResult = - leastSquaresImplementation - .doLeastSquares(data, outputs, new Array[Double](workSize)) + leastSquaresImplementation.doLeastSquares(data, + outputs, + new Array[Double](workSize)) } implicit val matrixVector: Impl2[DenseMatrix[Double], diff --git a/repos/breeze/math/src/main/scala/breeze/util/ArrayUtil.scala b/repos/breeze/math/src/main/scala/breeze/util/ArrayUtil.scala index b5411180cae..8f598723c5c 100644 --- a/repos/breeze/math/src/main/scala/breeze/util/ArrayUtil.scala +++ b/repos/breeze/math/src/main/scala/breeze/util/ArrayUtil.scala @@ -182,23 +182,23 @@ object ArrayUtil { case x: Array[Int] => Arrays.equals(a.asInstanceOf[Array[Int]], b.asInstanceOf[Array[Int]]) case x: Array[Float] => - Arrays - .equals(a.asInstanceOf[Array[Float]], b.asInstanceOf[Array[Float]]) + Arrays.equals(a.asInstanceOf[Array[Float]], + b.asInstanceOf[Array[Float]]) case x: Array[Boolean] => Arrays.equals(a.asInstanceOf[Array[Boolean]], b.asInstanceOf[Array[Boolean]]) case x: Array[Long] => - Arrays - .equals(a.asInstanceOf[Array[Long]], b.asInstanceOf[Array[Long]]) + Arrays.equals(a.asInstanceOf[Array[Long]], + b.asInstanceOf[Array[Long]]) case x: Array[Short] => - Arrays - .equals(a.asInstanceOf[Array[Short]], b.asInstanceOf[Array[Short]]) + Arrays.equals(a.asInstanceOf[Array[Short]], + b.asInstanceOf[Array[Short]]) case x: Array[Char] => - Arrays - .equals(a.asInstanceOf[Array[Char]], b.asInstanceOf[Array[Char]]) + Arrays.equals(a.asInstanceOf[Array[Char]], + b.asInstanceOf[Array[Char]]) case x: Array[Byte] => - Arrays - .equals(a.asInstanceOf[Array[Byte]], b.asInstanceOf[Array[Byte]]) + Arrays.equals(a.asInstanceOf[Array[Byte]], + b.asInstanceOf[Array[Byte]]) case x: Array[_] => Arrays.equals(a.asInstanceOf[Array[AnyRef]], 
b.asInstanceOf[Array[AnyRef]]) diff --git a/repos/cats/js/src/main/scala/cats/js/std/future.scala b/repos/cats/js/src/main/scala/cats/js/std/future.scala index 73492209c9b..d3168e8ebe0 100644 --- a/repos/cats/js/src/main/scala/cats/js/std/future.scala +++ b/repos/cats/js/src/main/scala/cats/js/std/future.scala @@ -38,8 +38,8 @@ private[std] sealed trait FutureInstances1 extends FutureInstances2 { implicit ec: E): PartialOrder[Future[A]] = new PartialOrder[Future[A]] { def partialCompare(x: Future[A], y: Future[A]): Double = - Await - .result((x zip y).map { case (x, y) => x partialCompare y }, atMost) + Await.result((x zip y).map { case (x, y) => x partialCompare y }, + atMost) } } diff --git a/repos/cats/jvm/src/main/scala/cats/jvm/std/future.scala b/repos/cats/jvm/src/main/scala/cats/jvm/std/future.scala index 9465110d8d6..0bf50ebefa7 100644 --- a/repos/cats/jvm/src/main/scala/cats/jvm/std/future.scala +++ b/repos/cats/jvm/src/main/scala/cats/jvm/std/future.scala @@ -31,8 +31,8 @@ private[std] sealed trait FutureInstances1 extends FutureInstances2 { implicit ec: E): PartialOrder[Future[A]] = new PartialOrder[Future[A]] { def partialCompare(x: Future[A], y: Future[A]): Double = - Await - .result((x zip y).map { case (x, y) => x partialCompare y }, atMost) + Await.result((x zip y).map { case (x, y) => x partialCompare y }, + atMost) } } diff --git a/repos/ensime-server/core/src/main/scala/org/ensime/core/Analyzer.scala b/repos/ensime-server/core/src/main/scala/org/ensime/core/Analyzer.scala index 5c3ace76b73..af0f94932b0 100644 --- a/repos/ensime-server/core/src/main/scala/org/ensime/core/Analyzer.scala +++ b/repos/ensime-server/core/src/main/scala/org/ensime/core/Analyzer.scala @@ -245,8 +245,9 @@ class Analyzer( case DocUriForSymbolReq(typeFullName: String, memberName: Option[String], signatureString: Option[String]) => - sender() ! scalaCompiler - .askDocSignatureForSymbol(typeFullName, memberName, signatureString) + sender() ! scalaCompiler.askDocSignatureForSymbol(typeFullName, + memberName, + signatureString) case InspectPackageByPathReq(path: String) => sender ! scalaCompiler.askPackageByPath(path).getOrElse(FalseResponse) case TypeAtPointReq(file, range: OffsetRange) => diff --git a/repos/ensime-server/core/src/main/scala/org/ensime/core/Completion.scala b/repos/ensime-server/core/src/main/scala/org/ensime/core/Completion.scala index f71decd6835..9bc2be022f6 100644 --- a/repos/ensime-server/core/src/main/scala/org/ensime/core/Completion.scala +++ b/repos/ensime-server/core/src/main/scala/org/ensime/core/Completion.scala @@ -112,8 +112,11 @@ trait CompletionControl { self: RichPresentationCompiler => val contents = Array.ofDim[Char](orig.length + 1) System.arraycopy(orig, 0, contents, 0, point) contents(point) = 'a' - System - .arraycopy(orig, point, contents, point + 1, orig.length - point) + System.arraycopy(orig, + point, + contents, + point + 1, + orig.length - point) // uses the same VirtualFile as the original val src = new BatchSourceFile(inputP.source.file, contents) diff --git a/repos/ensime-server/core/src/main/scala/org/ensime/core/JavaAnalyzer.scala b/repos/ensime-server/core/src/main/scala/org/ensime/core/JavaAnalyzer.scala index 141dcb6a5c7..24a12e20912 100644 --- a/repos/ensime-server/core/src/main/scala/org/ensime/core/JavaAnalyzer.scala +++ b/repos/ensime-server/core/src/main/scala/org/ensime/core/JavaAnalyzer.scala @@ -63,8 +63,10 @@ class JavaAnalyzer( sender() ! VoidResponse case CompletionsReq(file, point, maxResults, caseSens, _) => - sender() ! 
javaCompiler - .askCompletionsAtPoint(file, point, maxResults, caseSens) + sender() ! javaCompiler.askCompletionsAtPoint(file, + point, + maxResults, + caseSens) case DocUriAtPointReq(file, range) => sender() ! javaCompiler.askDocSignatureAtPoint(file, range.from) diff --git a/repos/ensime-server/core/src/main/scala/org/ensime/core/Project.scala b/repos/ensime-server/core/src/main/scala/org/ensime/core/Project.scala index 7151d9efb90..5b97ec5e757 100644 --- a/repos/ensime-server/core/src/main/scala/org/ensime/core/Project.scala +++ b/repos/ensime-server/core/src/main/scala/org/ensime/core/Project.scala @@ -107,8 +107,9 @@ class Project( log.warning( "Detected a pure Java project. Scala queries are not available.") scalac = system.deadLetters - javac = context - .actorOf(JavaAnalyzer(broadcaster, indexer, searchService), "javac") + javac = context.actorOf( + JavaAnalyzer(broadcaster, indexer, searchService), + "javac") } debugger = context.actorOf(DebugManager(broadcaster), "debugging") docs = context.actorOf(DocResolver(), "docs") diff --git a/repos/ensime-server/core/src/main/scala/org/ensime/indexer/SearchService.scala b/repos/ensime-server/core/src/main/scala/org/ensime/indexer/SearchService.scala index 03189d46eaa..a7cb3d3b561 100644 --- a/repos/ensime-server/core/src/main/scala/org/ensime/indexer/SearchService.scala +++ b/repos/ensime-server/core/src/main/scala/org/ensime/indexer/SearchService.scala @@ -278,8 +278,8 @@ class SearchService( * the list of symbols is non-empty. */ - val backlogActor = actorSystem - .actorOf(Props(new IndexingQueueActor(this)), "ClassfileIndexer") + val backlogActor = actorSystem.actorOf(Props(new IndexingQueueActor(this)), + "ClassfileIndexer") // deletion in both Lucene and H2 is really slow, batching helps def deleteInBatches( diff --git a/repos/fastparse/pythonparse/shared/src/main/scala/pythonparse/Statements.scala b/repos/fastparse/pythonparse/shared/src/main/scala/pythonparse/Statements.scala index a01958039be..9a312643420 100644 --- a/repos/fastparse/pythonparse/shared/src/main/scala/pythonparse/Statements.scala +++ b/repos/fastparse/pythonparse/shared/src/main/scala/pythonparse/Statements.scala @@ -123,8 +123,9 @@ class Statements(indent: Int) { kw("from") ~ (named | unNamed) ~ kw("import") ~ (star | "(" ~ import_as_names ~ ")" | import_as_names)).map { case (dots, module, names) => - Ast.stmt - .ImportFrom(module.map(Ast.identifier), names, dots.map(_.length)) + Ast.stmt.ImportFrom(module.map(Ast.identifier), + names, + dots.map(_.length)) } } val import_as_name: P[Ast.alias] = @@ -172,8 +173,10 @@ class Statements(indent: Int) { kw("for") ~/ exprlist ~ kw("in") ~ testlist ~ ":" ~~ suite ~~ (space_indents ~ kw("else") ~/ ":" ~~ suite).?).map { case (itervars, generator, body, orelse) => - Ast.stmt - .For(tuplize(itervars), tuplize(generator), body, orelse.toSeq.flatten) + Ast.stmt.For(tuplize(itervars), + tuplize(generator), + body, + orelse.toSeq.flatten) } val try_stmt: P[Ast.stmt] = { val `try` = P(kw("try") ~/ ":" ~~ suite) diff --git a/repos/fastparse/scalaparse/jvm/src/test/resources/scalaparse/GenJSCode.scala b/repos/fastparse/scalaparse/jvm/src/test/resources/scalaparse/GenJSCode.scala index 79558126271..bf12e70dacc 100644 --- a/repos/fastparse/scalaparse/jvm/src/test/resources/scalaparse/GenJSCode.scala +++ b/repos/fastparse/scalaparse/jvm/src/test/resources/scalaparse/GenJSCode.scala @@ -1702,8 +1702,8 @@ abstract class GenJSCode arguments: List[js.Tree])(implicit pos: Position): js.Tree = { val className = 
encodeClassFullName(method.owner) val methodIdent = encodeMethodSym(method) - currentMethodInfoBuilder - .addMethodCalledStatically(className, methodIdent.name) + currentMethodInfoBuilder.addMethodCalledStatically(className, + methodIdent.name) js.ApplyStatically(receiver, jstpe.ClassType(className), methodIdent, @@ -1725,8 +1725,8 @@ abstract class GenJSCode methodIdent: js.Ident, arguments: List[js.Tree], resultType: jstpe.Type)(implicit pos: Position): js.Tree = { - currentMethodInfoBuilder - .addStaticMethodCalled(implName, methodIdent.name) + currentMethodInfoBuilder.addStaticMethodCalled(implName, + methodIdent.name) js.ApplyStatic(jstpe.ClassType(implName), methodIdent, arguments)( resultType) } diff --git a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/netty3/ChannelSnooper.scala b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/netty3/ChannelSnooper.scala index 42c59e96d57..d552851a7bf 100644 --- a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/netty3/ChannelSnooper.scala +++ b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/netty3/ChannelSnooper.scala @@ -69,8 +69,9 @@ class ChannelBufferSnooper(val name: String) extends ChannelSnooper { def dump(printer: (Channel, String) => Unit, ch: Channel, buf: ChannelBuffer) { - val rawStr = buf - .toString(buf.readerIndex, buf.readableBytes, Charset.forName("UTF-8")) + val rawStr = buf.toString(buf.readerIndex, + buf.readableBytes, + Charset.forName("UTF-8")) val str = rawStr.replaceAll("\r", "\\\\r").replaceAll("\n", "\\\\n") val asciiStr = str map { c => diff --git a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/netty3/Netty3Transporter.scala b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/netty3/Netty3Transporter.scala index 499f0fe5907..252753a1919 100644 --- a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/netty3/Netty3Transporter.scala +++ b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/netty3/Netty3Transporter.scala @@ -337,8 +337,8 @@ case class Netty3Transporter[In, Out]( ): ChannelPipeline = { val pipeline = pipelineFactory.getPipeline() - pipeline - .addFirst("channelStatsHandler", channelStatsHandler(statsReceiver)) + pipeline.addFirst("channelStatsHandler", + channelStatsHandler(statsReceiver)) pipeline.addFirst("channelRequestStatsHandler", new ChannelRequestStatsHandler(statsReceiver)) @@ -402,8 +402,10 @@ case class Netty3Transporter[In, Out]( UsernamePassAuthenticationSetting(username, password) case _ => Unauthenticated } - SocksConnectHandler - .addHandler(proxyAddr, inetSockAddr, Seq(authentication), pipeline) + SocksConnectHandler.addHandler(proxyAddr, + inetSockAddr, + Seq(authentication), + pipeline) } case _ => } @@ -411,8 +413,10 @@ case class Netty3Transporter[In, Out]( (httpProxy, addr) match { case (Some(proxyAddr), inetAddr: InetSocketAddress) if !inetAddr.isUnresolved => - HttpConnectHandler - .addHandler(proxyAddr, inetAddr, pipeline, httpProxyCredentials) + HttpConnectHandler.addHandler(proxyAddr, + inetAddr, + pipeline, + httpProxyCredentials) case _ => } diff --git a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/service/FailureAccrualFactory.scala b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/service/FailureAccrualFactory.scala index 8d9965a1362..15fca50f32c 100644 --- a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/service/FailureAccrualFactory.scala +++ 
b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/service/FailureAccrualFactory.scala @@ -49,8 +49,8 @@ object FailureAccrualFactory { Backoff.equalJittered(5.seconds, 300.seconds) private[finagle] val defaultPolicy = () => - FailureAccrualPolicy - .consecutiveFailures(defaultConsecutiveFailures, jitteredBackoff) + FailureAccrualPolicy.consecutiveFailures(defaultConsecutiveFailures, + jitteredBackoff) /** * Add jitter in `markDeadFor` to reduce correlation. diff --git a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/util/HashedWheelTimer.scala b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/util/HashedWheelTimer.scala index c55fff2f290..0cff73716ad 100644 --- a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/util/HashedWheelTimer.scala +++ b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/util/HashedWheelTimer.scala @@ -125,8 +125,9 @@ object HashedWheelTimer { val Default: Timer = new HashedWheelTimer(nettyHwt) - TimerStats - .deviation(nettyHwt, 10.milliseconds, FinagleStatsReceiver.scope("timer")) + TimerStats.deviation(nettyHwt, + 10.milliseconds, + FinagleStatsReceiver.scope("timer")) TimerStats.hashedWheelTimerInternals(nettyHwt, () => 10.seconds, diff --git a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/util/TimerStats.scala b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/util/TimerStats.scala index a67395fcbf6..10d3a7eb1b9 100644 --- a/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/util/TimerStats.scala +++ b/repos/finagle/finagle-core/src/main/scala/com/twitter/finagle/util/TimerStats.scala @@ -37,8 +37,9 @@ private[finagle] object TimerStats { val deltaMillis = nowMillis - nextAtMillis nextAtMillis = nowMillis + tickDuration.inMilliseconds deviation.add(deltaMillis) - hwt - .newTimeout(this, tickDuration.inMilliseconds, TimeUnit.MILLISECONDS) + hwt.newTimeout(this, + tickDuration.inMilliseconds, + TimeUnit.MILLISECONDS) } } hwt.newTimeout(timerTask, @@ -126,7 +127,8 @@ private[finagle] object TimerStats { hwt.newTimeout(this, nextRunAt().inMilliseconds, TimeUnit.MILLISECONDS) } } - hwt - .newTimeout(timerTask, nextRunAt().inMilliseconds, TimeUnit.MILLISECONDS) + hwt.newTimeout(timerTask, + nextRunAt().inMilliseconds, + TimeUnit.MILLISECONDS) } } diff --git a/repos/finagle/finagle-core/src/test/scala/com/twitter/finagle/httpproxy/HttpConnectHandlerTest.scala b/repos/finagle/finagle-core/src/test/scala/com/twitter/finagle/httpproxy/HttpConnectHandlerTest.scala index 7ca3bfba75b..94736a839cc 100644 --- a/repos/finagle/finagle-core/src/test/scala/com/twitter/finagle/httpproxy/HttpConnectHandlerTest.scala +++ b/repos/finagle/finagle-core/src/test/scala/com/twitter/finagle/httpproxy/HttpConnectHandlerTest.scala @@ -35,8 +35,10 @@ class HttpConnectHandlerTest extends FunSuite with MockitoSugar { connectFuture, ChannelState.CONNECTED, remoteAddress) - val ch = HttpConnectHandler - .addHandler(proxyAddress, remoteAddress, pipeline, None) + val ch = HttpConnectHandler.addHandler(proxyAddress, + remoteAddress, + pipeline, + None) ch.handleDownstream(ctx, connectRequested) def checkDidClose() { diff --git a/repos/finagle/finagle-example/src/main/scala/com/twitter/finagle/example/memcache/KetamaClientStress.scala b/repos/finagle/finagle-example/src/main/scala/com/twitter/finagle/example/memcache/KetamaClientStress.scala index abe2faece63..e76769dadba 100644 --- a/repos/finagle/finagle-example/src/main/scala/com/twitter/finagle/example/memcache/KetamaClientStress.scala +++ 
b/repos/finagle/finagle-example/src/main/scala/com/twitter/finagle/example/memcache/KetamaClientStress.scala @@ -174,8 +174,8 @@ object KetamaClientStress extends App { val (key, value) = (randomString(config.keysize()), Buf.Utf8(randomString(config.valuesize()))) () => - ketamaClient - .add(key + load_count.getAndIncrement().toString, value) + ketamaClient.add(key + load_count.getAndIncrement().toString, + value) case "replace" => keyValueSet foreach { case (k, v) => ketamaClient.set(k, v)() } () => @@ -285,8 +285,9 @@ object KetamaClientStress extends App { Buf.Utf8(randomString(config.valuesize()))) () => { - replicationClient - .add(key + load_count.getAndIncrement().toString, value) + replicationClient.add( + key + load_count.getAndIncrement().toString, + value) } case "replace" => keyValueSet foreach { case (k, v) => replicationClient.set(k, v)() } diff --git a/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/Codec.scala b/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/Codec.scala index 278fe965046..b169bffda61 100644 --- a/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/Codec.scala +++ b/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/Codec.scala @@ -244,8 +244,8 @@ case class Http( new PayloadSizeHandler(maxRequestSizeInBytes)) // Response to ``Expect: Continue'' requests. - pipeline - .addLast("respondToExpectContinue", new RespondToExpectContinue) + pipeline.addLast("respondToExpectContinue", + new RespondToExpectContinue) if (!_streaming) pipeline.addLast("httpDechunker", new HttpChunkAggregator(maxRequestSizeInBytes)) diff --git a/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/Message.scala b/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/Message.scala index 97f343b9bc0..b5dd1eb91cb 100644 --- a/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/Message.scala +++ b/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/Message.scala @@ -477,8 +477,9 @@ object Message { val ContentTypeJavascript = MediaType.Javascript + ";" + CharsetUtf8 val ContentTypeWwwFrom = MediaType.WwwForm + ";" + CharsetUtf8 - private val HttpDateFormat = FastDateFormat - .getInstance("EEE, dd MMM yyyy HH:mm:ss", TimeZone.getTimeZone("GMT")) + private val HttpDateFormat = FastDateFormat.getInstance( + "EEE, dd MMM yyyy HH:mm:ss", + TimeZone.getTimeZone("GMT")) def httpDateFormat(date: Date): String = HttpDateFormat.format(date) + " GMT" } diff --git a/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/filter/Cors.scala b/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/filter/Cors.scala index edceb0b6160..2a5ef82c2ed 100644 --- a/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/filter/Cors.scala +++ b/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/filter/Cors.scala @@ -165,8 +165,8 @@ object Cors { */ protected[this] def setMaxAge(response: Response): Response = { policy.maxAge foreach { maxAge => - response.headers - .add("Access-Control-Max-Age", maxAge.inSeconds.toString) + response.headers.add("Access-Control-Max-Age", + maxAge.inSeconds.toString) } response } diff --git a/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/filter/LoggingFilter.scala b/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/filter/LoggingFilter.scala index 81a2e1b2201..8c67747e439 100644 --- 
a/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/filter/LoggingFilter.scala +++ b/repos/finagle/finagle-http/src/main/scala/com/twitter/finagle/http/filter/LoggingFilter.scala @@ -77,8 +77,8 @@ class CommonLogFormatter extends LogFormatter { * %D: response time in milliseconds * "%{User-Agent}i": user agent */ - val DateFormat = FastDateFormat - .getInstance("dd/MMM/yyyy:HH:mm:ss Z", TimeZone.getTimeZone("GMT")) + val DateFormat = FastDateFormat.getInstance("dd/MMM/yyyy:HH:mm:ss Z", + TimeZone.getTimeZone("GMT")) def format(request: Request, response: Response, responseTime: Duration) = { val remoteAddr = request.remoteAddress.getHostAddress diff --git a/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/CookieMapTest.scala b/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/CookieMapTest.scala index 248bc971158..8b139c8951a 100644 --- a/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/CookieMapTest.scala +++ b/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/CookieMapTest.scala @@ -44,8 +44,8 @@ class CookieMapTest extends FunSuite { test("cookie with attributes") { val request = Request() - request.headers - .set("Cookie", "name=value; Max-Age=23; Domain=.example.com; Path=/") + request.headers.set("Cookie", + "name=value; Max-Age=23; Domain=.example.com; Path=/") val cookie = request.cookies("name") assert(cookie.value == "value") diff --git a/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/RequestTest.scala b/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/RequestTest.scala index cd55abe67a0..a0ed48dfa6d 100644 --- a/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/RequestTest.scala +++ b/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/RequestTest.scala @@ -95,11 +95,9 @@ class RequestTest extends FunSuite { .queryString("/search.json", Map.empty[String, String]) == "/search.json") assert( - Request - .queryString("/search.json", "q" -> "twitter") == "/search.json?q=twitter") + Request.queryString("/search.json", "q" -> "twitter") == "/search.json?q=twitter") assert( - Request - .queryString("/search.json", Map("q" -> "twitter")) == "/search.json?q=twitter") + Request.queryString("/search.json", Map("q" -> "twitter")) == "/search.json?q=twitter") assert(Request.queryString("q" -> "twitter") == "?q=twitter") assert(Request.queryString(Map("q" -> "twitter")) == "?q=twitter") diff --git a/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/codec/HttpDtabTest.scala b/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/codec/HttpDtabTest.scala index 36220908534..530d0d23c31 100644 --- a/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/codec/HttpDtabTest.scala +++ b/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/codec/HttpDtabTest.scala @@ -32,8 +32,8 @@ class HttpDtabTest extends FunSuite with AssertionsForJUnit { test("Dtab-Local: read multiple, with commas") { val m = newMsg() - m.headers - .add("Dtab-Local", "/srv#/prod/local/role=>/$/fail;/srv=>/srv#/staging") + m.headers.add("Dtab-Local", + "/srv#/prod/local/role=>/$/fail;/srv=>/srv#/staging") m.headers.add("Dtab-Local", "/srv/local=>/srv/other,/srv=>/srv#/devel") val expected = Dtab.read( "/srv#/prod/local/role => /$/fail;" + "/srv => /srv#/staging;" + @@ -44,8 +44,8 @@ class HttpDtabTest extends FunSuite with AssertionsForJUnit { test("Dtab-Local takes precedence over X-Dtab") { val m = newMsg() - 
m.headers - .add("Dtab-Local", "/srv#/prod/local/role=>/$/fail;/srv=>/srv#/staging") + m.headers.add("Dtab-Local", + "/srv#/prod/local/role=>/$/fail;/srv=>/srv#/staging") // HttpDtab.write encodes X-Dtab headers HttpDtab.write(Dtab.read("/srv => /$/nil"), m) m.headers.add("Dtab-Local", "/srv/local=>/srv/other,/srv=>/srv#/devel") diff --git a/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/exp/MultipartTest.scala b/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/exp/MultipartTest.scala index c472a292bec..a53428b5f7e 100644 --- a/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/exp/MultipartTest.scala +++ b/repos/finagle/finagle-http/src/test/scala/com/twitter/finagle/http/exp/MultipartTest.scala @@ -75,8 +75,10 @@ class MultipartTest extends FunSuite { Buf.Utf8("." * (Multipart.MaxInMemoryFileSize.inBytes.toInt + 10)) val multipart = newRequest(foo).multipart.get - val Multipart - .OnDiskFileUpload(file, contentType, fileName, contentTransferEncoding) = + val Multipart.OnDiskFileUpload(file, + contentType, + fileName, + contentTransferEncoding) = multipart.files("groups").head val attr = multipart.attributes("type").head diff --git a/repos/finagle/finagle-memcached/src/main/scala/com/twitter/finagle/Memcached.scala b/repos/finagle/finagle-memcached/src/main/scala/com/twitter/finagle/Memcached.scala index b56300c0f7c..7d6cda08ae2 100644 --- a/repos/finagle/finagle-memcached/src/main/scala/com/twitter/finagle/Memcached.scala +++ b/repos/finagle/finagle-memcached/src/main/scala/com/twitter/finagle/Memcached.scala @@ -311,8 +311,8 @@ object Memcached val key = KetamaClientKey.fromCacheNode(node) val stk = stack.replace( FailureAccrualFactory.role, - KetamaFailureAccrualFactory - .module[Command, Response](key, healthBroker)) + KetamaFailureAccrualFactory.module[Command, Response](key, + healthBroker)) withStack(stk).newService(mkDestination(node.host, node.port), label) } diff --git a/repos/finagle/finagle-memcached/src/main/scala/com/twitter/finagle/memcached/Client.scala b/repos/finagle/finagle-memcached/src/main/scala/com/twitter/finagle/memcached/Client.scala index c93491262e5..6ce5386000b 100644 --- a/repos/finagle/finagle-memcached/src/main/scala/com/twitter/finagle/memcached/Client.scala +++ b/repos/finagle/finagle-memcached/src/main/scala/com/twitter/finagle/memcached/Client.scala @@ -913,16 +913,16 @@ private[finagle] class KetamaFailureAccrualFactory[Req, Rep]( ejectFailedHost: Boolean, label: String ) = - this( - underlying, - FailureAccrualPolicy - .consecutiveFailures(numFailures, Backoff.fromFunction(markDeadFor)), - timer, - key, - healthBroker, - ejectFailedHost, - label, - ClientStatsReceiver.scope("memcached_client")) + this(underlying, + FailureAccrualPolicy.consecutiveFailures( + numFailures, + Backoff.fromFunction(markDeadFor)), + timer, + key, + healthBroker, + ejectFailedHost, + label, + ClientStatsReceiver.scope("memcached_client")) private[this] val failureAccrualEx = Future.exception( new FailureAccrualException("Endpoint is marked dead by failureAccrual") { @@ -1141,8 +1141,11 @@ private[finagle] class KetamaPartitionedClient( expiry: Time, value: Buf, casUnique: Buf) = - ready.interruptible before super - .checkAndSet(key, flags, expiry, value, casUnique) + ready.interruptible before super.checkAndSet(key, + flags, + expiry, + value, + casUnique) override def add(key: String, flags: Int, expiry: Time, value: Buf) = ready.interruptible before super.add(key, flags, expiry, value) diff --git 
a/repos/finagle/finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/ClusterClientTest.scala b/repos/finagle/finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/ClusterClientTest.scala index a8f28e535b2..73cb4c25780 100644 --- a/repos/finagle/finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/ClusterClientTest.scala +++ b/repos/finagle/finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/ClusterClientTest.scala @@ -605,8 +605,9 @@ class ClusterClientTest ): Cluster[CacheNode] = { val myCachePool = if (!ignoreConfigData) - CachePoolCluster - .newZkCluster(zkPath, zookeeperClient, backupPool = backupPool) + CachePoolCluster.newZkCluster(zkPath, + zookeeperClient, + backupPool = backupPool) else CachePoolCluster.newUnmanagedZkCluster(zkPath, zookeeperClient) Await.result(myCachePool.ready, TimeOut) // wait until the pool is ready diff --git a/repos/finagle/finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/MigrationClientTest.scala b/repos/finagle/finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/MigrationClientTest.scala index 11ae5de36cf..629b912d4c6 100644 --- a/repos/finagle/finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/MigrationClientTest.scala +++ b/repos/finagle/finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/MigrationClientTest.scala @@ -125,8 +125,9 @@ class MigrationClientTest Memcached.client.newRichClient( dest = "twcache!localhost:" + zookeeperServerPort + "!" + newPoolPath) - val migrationClient = MigrationClient - .newMigrationClient("localhost:" + zookeeperServerPort, basePath) + val migrationClient = + MigrationClient.newMigrationClient("localhost:" + zookeeperServerPort, + basePath) migrationClient .loadZKData() // force loading the config to fully set-up the client @@ -158,8 +159,9 @@ class MigrationClientTest Memcached.client.newRichClient( dest = "twcache!localhost:" + zookeeperServerPort + "!" + newPoolPath) - val migrationClient = MigrationClient - .newMigrationClient("localhost:" + zookeeperServerPort, basePath) + val migrationClient = + MigrationClient.newMigrationClient("localhost:" + zookeeperServerPort, + basePath) migrationClient .loadZKData() // force loading the config to fully set-up the client @@ -198,8 +200,9 @@ class MigrationClientTest Memcached.client.newRichClient( dest = "twcache!localhost:" + zookeeperServerPort + "!" + newPoolPath) - val migrationClient = MigrationClient - .newMigrationClient("localhost:" + zookeeperServerPort, basePath) + val migrationClient = + MigrationClient.newMigrationClient("localhost:" + zookeeperServerPort, + basePath) migrationClient .loadZKData() // force loading the config to fully set-up the client @@ -240,8 +243,9 @@ class MigrationClientTest Memcached.client.newRichClient( dest = "twcache!localhost:" + zookeeperServerPort + "!" + newPoolPath) - val migrationClient = MigrationClient - .newMigrationClient("localhost:" + zookeeperServerPort, basePath) + val migrationClient = + MigrationClient.newMigrationClient("localhost:" + zookeeperServerPort, + basePath) migrationClient .loadZKData() // force loading the config to fully set-up the client @@ -277,8 +281,9 @@ class MigrationClientTest Memcached.client.newRichClient( dest = "twcache!localhost:" + zookeeperServerPort + "!" 
+ newPoolPath) - val migrationClient = MigrationClient - .newMigrationClient("localhost:" + zookeeperServerPort, basePath) + val migrationClient = + MigrationClient.newMigrationClient("localhost:" + zookeeperServerPort, + basePath) migrationClient .loadZKData() // force loading the config to fully set-up the client diff --git a/repos/finagle/finagle-mux/src/test/scala/com/twitter/finagle/mux/ServerTest.scala b/repos/finagle/finagle-mux/src/test/scala/com/twitter/finagle/mux/ServerTest.scala index b299d7a463b..93d63d9e507 100644 --- a/repos/finagle/finagle-mux/src/test/scala/com/twitter/finagle/mux/ServerTest.scala +++ b/repos/finagle/finagle-mux/src/test/scala/com/twitter/finagle/mux/ServerTest.scala @@ -462,8 +462,11 @@ class ServerTest extends FunSuite with MockitoSugar with AssertionsForJUnit { val svc = Service.mk { req: Request => Future.value(Response.empty) } - val server = ServerDispatcher - .newRequestResponse(transport, svc, Lessor.nil, NullTracer, sr) + val server = ServerDispatcher.newRequestResponse(transport, + svc, + Lessor.nil, + NullTracer, + sr) val msg = Message.Tdispatch(tag = 10, Seq.empty, diff --git a/repos/finagle/finagle-mysql/src/main/scala/com/twitter/finagle/mysql/CanBeParameter.scala b/repos/finagle/finagle-mysql/src/main/scala/com/twitter/finagle/mysql/CanBeParameter.scala index 0013c3086cc..7a358e177e7 100644 --- a/repos/finagle/finagle-mysql/src/main/scala/com/twitter/finagle/mysql/CanBeParameter.scala +++ b/repos/finagle/finagle-mysql/src/main/scala/com/twitter/finagle/mysql/CanBeParameter.scala @@ -175,8 +175,9 @@ object CanBeParameter { def sizeOf(param: java.util.Date) = 12 def typeCode(param: java.util.Date) = Type.DateTime def write(writer: BufferWriter, param: java.util.Date) = { - valueCanBeParameter - .write(writer, TimestampValue(new java.sql.Timestamp(param.getTime))) + valueCanBeParameter.write( + writer, + TimestampValue(new java.sql.Timestamp(param.getTime))) } } } diff --git a/repos/finagle/finagle-mysql/src/test/scala/com/twitter/finagle/mysql/integration/IntegrationClient.scala b/repos/finagle/finagle-mysql/src/test/scala/com/twitter/finagle/mysql/integration/IntegrationClient.scala index 777a3b9576a..94d481063c4 100644 --- a/repos/finagle/finagle-mysql/src/test/scala/com/twitter/finagle/mysql/integration/IntegrationClient.scala +++ b/repos/finagle/finagle-mysql/src/test/scala/com/twitter/finagle/mysql/integration/IntegrationClient.scala @@ -46,8 +46,8 @@ trait IntegrationClient { val client: Option[Client] = if (isAvailable) { - logger - .log(Level.INFO, "Attempting to connect to mysqld @ localhost:3306") + logger.log(Level.INFO, + "Attempting to connect to mysqld @ localhost:3306") val username = p.getProperty("username", "") val password = p.getProperty("password", null) val db = p.getProperty("db", "test") diff --git a/repos/finagle/finagle-netty4/src/main/scala/com/twitter/finagle/netty4/Netty4Listener.scala b/repos/finagle/finagle-netty4/src/main/scala/com/twitter/finagle/netty4/Netty4Listener.scala index 1f8f38d0167..856fbbe134d 100644 --- a/repos/finagle/finagle-netty4/src/main/scala/com/twitter/finagle/netty4/Netty4Listener.scala +++ b/repos/finagle/finagle-netty4/src/main/scala/com/twitter/finagle/netty4/Netty4Listener.scala @@ -99,10 +99,10 @@ private[finagle] case class Netty4Listener[In, Out]( bootstrap.childOption[JBool](ChannelOption.TCP_NODELAY, noDelay) //todo: investigate pooled allocator CSL-2089 - bootstrap - .option(ChannelOption.ALLOCATOR, UnpooledByteBufAllocator.DEFAULT) - bootstrap - 
.childOption(ChannelOption.ALLOCATOR, UnpooledByteBufAllocator.DEFAULT) + bootstrap.option(ChannelOption.ALLOCATOR, + UnpooledByteBufAllocator.DEFAULT) + bootstrap.childOption(ChannelOption.ALLOCATOR, + UnpooledByteBufAllocator.DEFAULT) bootstrap.option[JBool](ChannelOption.SO_REUSEADDR, reuseAddr) bootstrap.option[JInt](ChannelOption.SO_LINGER, 0) backlog.foreach(bootstrap.option[JInt](ChannelOption.SO_BACKLOG, _)) diff --git a/repos/finagle/finagle-netty4/src/main/scala/com/twitter/finagle/netty4/channel/ChannelSnooper.scala b/repos/finagle/finagle-netty4/src/main/scala/com/twitter/finagle/netty4/channel/ChannelSnooper.scala index cd976120926..f0a169b3e5b 100644 --- a/repos/finagle/finagle-netty4/src/main/scala/com/twitter/finagle/netty4/channel/ChannelSnooper.scala +++ b/repos/finagle/finagle-netty4/src/main/scala/com/twitter/finagle/netty4/channel/ChannelSnooper.scala @@ -75,8 +75,9 @@ private[netty4] class ByteBufSnooper(val name: String) extends ChannelSnooper { def dump(printer: (Channel, String) => Unit, ch: Channel, buf: ByteBuf): Unit = { - val rawStr = buf - .toString(buf.readerIndex, buf.readableBytes, Charset.forName("UTF-8")) + val rawStr = buf.toString(buf.readerIndex, + buf.readableBytes, + Charset.forName("UTF-8")) val str = rawStr.replaceAll("\r", "\\\\r").replaceAll("\n", "\\\\n") val asciiStr = str.map { c => if (c >= 32 && c < 128) c else '?' diff --git a/repos/finagle/finagle-redis/src/main/scala/com/twitter/finagle/redis/protocol/Reply.scala b/repos/finagle/finagle-redis/src/main/scala/com/twitter/finagle/redis/protocol/Reply.scala index 8cbf7538a20..5da8af59b6d 100644 --- a/repos/finagle/finagle-redis/src/main/scala/com/twitter/finagle/redis/protocol/Reply.scala +++ b/repos/finagle/finagle-redis/src/main/scala/com/twitter/finagle/redis/protocol/Reply.scala @@ -43,8 +43,8 @@ case class BulkReply(message: ChannelBuffer) extends MultiLineReply { case class EmptyBulkReply() extends MultiLineReply { val message = "$-1" override def toChannelBuffer = - ChannelBuffers - .wrappedBuffer(RedisCodec.NIL_BULK_REPLY_BA, RedisCodec.EOL_DELIMITER_BA) + ChannelBuffers.wrappedBuffer(RedisCodec.NIL_BULK_REPLY_BA, + RedisCodec.EOL_DELIMITER_BA) } case class MBulkReply(messages: List[Reply]) extends MultiLineReply { diff --git a/repos/finagle/finagle-serversets/src/main/scala/com/twitter/finagle/serverset2/ZkSession.scala b/repos/finagle/finagle-serversets/src/main/scala/com/twitter/finagle/serverset2/ZkSession.scala index 44582372f5e..aacffa5fda9 100644 --- a/repos/finagle/finagle-serversets/src/main/scala/com/twitter/finagle/serverset2/ZkSession.scala +++ b/repos/finagle/finagle-serversets/src/main/scala/com/twitter/finagle/serverset2/ZkSession.scala @@ -334,8 +334,9 @@ private[serverset2] object ZkSession { // Upon initial connection, send auth info, then update `u`. zkSession.state.changes.filter { _ == WatchState.SessionState(SessionState.SyncConnected) - }.toFuture.unit before zkSession - .addAuthInfo("digest", Buf.Utf8(authInfo)) onSuccess { _ => + }.toFuture.unit before zkSession.addAuthInfo( + "digest", + Buf.Utf8(authInfo)) onSuccess { _ => logger.info( s"New ZKSession is connected. 
Session ID: ${zkSession.sessionIdAsHex}") v() = zkSession diff --git a/repos/finagle/finagle-serversets/src/main/scala/com/twitter/finagle/serverset2/client/apache/ApacheWatcher.scala b/repos/finagle/finagle-serversets/src/main/scala/com/twitter/finagle/serverset2/client/apache/ApacheWatcher.scala index c007a8094a2..416dd06c402 100644 --- a/repos/finagle/finagle-serversets/src/main/scala/com/twitter/finagle/serverset2/client/apache/ApacheWatcher.scala +++ b/repos/finagle/finagle-serversets/src/main/scala/com/twitter/finagle/serverset2/client/apache/ApacheWatcher.scala @@ -22,8 +22,9 @@ private[serverset2] class ApacheWatcher( state, WatchState.SessionState(ApacheSessionState(event.getState))) case e => - EventDeliveryThread - .offer(state, WatchState.Determined(EventFilter(ApacheNodeEvent(e)))) + EventDeliveryThread.offer( + state, + WatchState.Determined(EventFilter(ApacheNodeEvent(e)))) } } } diff --git a/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/Zk2ResolverTest.scala b/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/Zk2ResolverTest.scala index b8a7462b44a..7749541414c 100644 --- a/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/Zk2ResolverTest.scala +++ b/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/Zk2ResolverTest.scala @@ -72,8 +72,9 @@ class Zk2ResolverTest val serverSet = new ServerSetImpl(inst.zookeeperClient, "/foo/bar") val joinAddr = RandomSocket() - val status = serverSet - .join(joinAddr, Map.empty[String, InetSocketAddress].asJava, shardId) + val status = serverSet.join(joinAddr, + Map.empty[String, InetSocketAddress].asJava, + shardId) eventually { assert(va.sample() == Addr.Bound(address(joinAddr)), "resolution is not bound once the serverset exists") diff --git a/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/client/apache/ApacheWatcherTest.scala b/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/client/apache/ApacheWatcherTest.scala index 74ca860160f..a7399958b26 100644 --- a/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/client/apache/ApacheWatcherTest.scala +++ b/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/client/apache/ApacheWatcherTest.scala @@ -65,8 +65,10 @@ class ApacheWatcherTest extends FlatSpec with OneInstancePerTest { } "StatsWatcher" should "count session events" in { - val statsWatcher = SessionStats - .watcher(watcher.state, statsReceiver, 5.seconds, DefaultTimer.twitter) + val statsWatcher = SessionStats.watcher(watcher.state, + statsReceiver, + 5.seconds, + DefaultTimer.twitter) // Set a constant witness so the Var doesn't reset state statsWatcher.changes.respond(_ => ()) for (ks <- KeeperState.values) { diff --git a/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/client/apache/ApacheZooKeeperTest.scala b/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/client/apache/ApacheZooKeeperTest.scala index 41327f4fc2f..48d5988e2f7 100644 --- a/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/client/apache/ApacheZooKeeperTest.scala +++ b/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/client/apache/ApacheZooKeeperTest.scala @@ -295,8 +295,10 @@ class ApacheZooKeeperTest "existsWatch" should "submit properly constructed exists" in { val existed = zk.existsWatch(path) - verify(mockZK) - 
.exists(meq(path), watcher.capture, statCB.capture, meq(null)) + verify(mockZK).exists(meq(path), + watcher.capture, + statCB.capture, + meq(null)) statCB.getValue.processResult(apacheOk, path, null, apacheStat) assert(Await.result(existed).value == Some(stat)) @@ -306,8 +308,10 @@ class ApacheZooKeeperTest "existsWatch" should "handle missing node" in { val existed = zk.existsWatch(path) - verify(mockZK) - .exists(meq(path), watcher.capture, statCB.capture, meq(null)) + verify(mockZK).exists(meq(path), + watcher.capture, + statCB.capture, + meq(null)) statCB.getValue.processResult(apacheNoNode, path, null, apacheStat) assert(Await.result(existed).value == None) @@ -317,8 +321,10 @@ class ApacheZooKeeperTest "existsWatch" should "handle ZK error" in { val existed = zk.existsWatch(path) - verify(mockZK) - .exists(meq(path), watcher.capture, statCB.capture, meq(null)) + verify(mockZK).exists(meq(path), + watcher.capture, + statCB.capture, + meq(null)) statCB.getValue.processResult(apacheConnLoss, path, null, null) intercept[KeeperException.ConnectionLoss] { @@ -332,8 +338,10 @@ class ApacheZooKeeperTest .thenThrow(new IllegalArgumentException) val existed = zk.existsWatch(path) - verify(mockZK) - .exists(meq(path), watcher.capture, statCB.capture, meq(null)) + verify(mockZK).exists(meq(path), + watcher.capture, + statCB.capture, + meq(null)) intercept[IllegalArgumentException] { Await.result(existed) @@ -389,8 +397,10 @@ class ApacheZooKeeperTest "getDataWatch" should "submit properly constructed getData" in { val nodeDataWatch = zk.getDataWatch(path) - verify(mockZK) - .getData(meq(path), watcher.capture, dataCB.capture, meq(null)) + verify(mockZK).getData(meq(path), + watcher.capture, + dataCB.capture, + meq(null)) dataCB.getValue.processResult(apacheOk, path, null, _data, apacheStat) assert(Await.result(nodeDataWatch).value == Node.Data(Some(data), stat)) @@ -400,8 +410,10 @@ class ApacheZooKeeperTest "getDataWatch" should "handle empty znodes" in { val nodeDataWatch = zk.getDataWatch(path) - verify(mockZK) - .getData(meq(path), watcher.capture, dataCB.capture, meq(null)) + verify(mockZK).getData(meq(path), + watcher.capture, + dataCB.capture, + meq(null)) dataCB.getValue.processResult(apacheOk, path, null, null, apacheStat) assert(Await.result(nodeDataWatch).value == Node.Data(None, stat)) @@ -411,8 +423,10 @@ class ApacheZooKeeperTest "getDataWatch" should "handle ZK error" in { val nodeDataWatch = zk.getDataWatch(path) - verify(mockZK) - .getData(meq(path), watcher.capture, dataCB.capture, meq(null)) + verify(mockZK).getData(meq(path), + watcher.capture, + dataCB.capture, + meq(null)) dataCB.getValue.processResult(apacheConnLoss, path, null, null, null) intercept[KeeperException.ConnectionLoss] { @@ -426,8 +440,10 @@ class ApacheZooKeeperTest .thenThrow(new IllegalArgumentException) val nodeDataWatch = zk.getDataWatch(path) - verify(mockZK) - .getData(meq(path), watcher.capture, dataCB.capture, meq(null)) + verify(mockZK).getData(meq(path), + watcher.capture, + dataCB.capture, + meq(null)) intercept[IllegalArgumentException] { Await.result(nodeDataWatch) @@ -438,8 +454,11 @@ class ApacheZooKeeperTest "setData" should "submit properly constructed versioned setData" in { val nodeStat = zk.setData(path, Some(data), Some(version)) - verify(mockZK) - .setData(meq(path), meq(_data), meq(version), statCB.capture, meq(null)) + verify(mockZK).setData(meq(path), + meq(_data), + meq(version), + statCB.capture, + meq(null)) statCB.getValue.processResult(apacheOk, path, null, apacheStat) 
assert(Await.result(nodeStat) == stat) @@ -449,8 +468,11 @@ class ApacheZooKeeperTest "setData" should "submit properly constructed unversioned setData" in { val nodeStat = zk.setData(path, Some(data), None) - verify(mockZK) - .setData(meq(path), meq(_data), meq(-1), statCB.capture, meq(null)) + verify(mockZK).setData(meq(path), + meq(_data), + meq(-1), + statCB.capture, + meq(null)) statCB.getValue.processResult(apacheOk, path, null, apacheStat) assert(Await.result(nodeStat) == stat) @@ -460,8 +482,11 @@ class ApacheZooKeeperTest "setData" should "submit properly constructed unversioned empty znode setData" in { val nodeStat = zk.setData(path, None, None) - verify(mockZK) - .setData(meq(path), meq(null), meq(-1), statCB.capture, meq(null)) + verify(mockZK).setData(meq(path), + meq(null), + meq(-1), + statCB.capture, + meq(null)) statCB.getValue.processResult(apacheOk, path, null, apacheStat) assert(Await.result(nodeStat) == stat) @@ -471,8 +496,11 @@ class ApacheZooKeeperTest "setData" should "handle ZK error" in { val nodeStat = zk.setData(path, Some(data), Some(version)) - verify(mockZK) - .setData(meq(path), meq(_data), meq(version), statCB.capture, meq(null)) + verify(mockZK).setData(meq(path), + meq(_data), + meq(version), + statCB.capture, + meq(null)) statCB.getValue.processResult(apacheConnLoss, path, null, null) intercept[KeeperException.ConnectionLoss] { @@ -487,8 +515,11 @@ class ApacheZooKeeperTest .thenThrow(new IllegalArgumentException) val nodeStat = zk.setData(path, Some(data), Some(version)) - verify(mockZK) - .setData(meq(path), meq(_data), meq(version), statCB.capture, meq(null)) + verify(mockZK).setData(meq(path), + meq(_data), + meq(version), + statCB.capture, + meq(null)) intercept[IllegalArgumentException] { Await.result(nodeStat) @@ -548,8 +579,11 @@ class ApacheZooKeeperTest "setACL" should "submit properly constructed unversioned setACL" in { val nodeStat = zk.setACL(path, acls, None) - verify(mockZK) - .setACL(meq(path), meq(apacheACLS), meq(-1), statCB.capture, meq(null)) + verify(mockZK).setACL(meq(path), + meq(apacheACLS), + meq(-1), + statCB.capture, + meq(null)) statCB.getValue.processResult(apacheOk, path, null, apacheStat) assert(Await.result(nodeStat) == stat) @@ -596,11 +630,16 @@ class ApacheZooKeeperTest "getChildren" should "submit properly constructed getChildren" in { val nodeChildren = zk.getChildren(path) - verify(mockZK) - .getChildren(meq(path), meq(null), childrenCB.capture, meq(null)) + verify(mockZK).getChildren(meq(path), + meq(null), + childrenCB.capture, + meq(null)) - childrenCB.getValue - .processResult(apacheOk, path, null, apacheChildren, apacheStat) + childrenCB.getValue.processResult(apacheOk, + path, + null, + apacheChildren, + apacheStat) assert(Await.result(nodeChildren) == children) assert(statsReceiver.counter("read_successes")() == 1) } @@ -608,11 +647,16 @@ class ApacheZooKeeperTest "getChildren" should "handle ZK error" in { val nodeChildren = zk.getChildren(path) - verify(mockZK) - .getChildren(meq(path), meq(null), childrenCB.capture, meq(null)) + verify(mockZK).getChildren(meq(path), + meq(null), + childrenCB.capture, + meq(null)) - childrenCB.getValue - .processResult(apacheConnLoss, path, null, apacheChildren, apacheStat) + childrenCB.getValue.processResult(apacheConnLoss, + path, + null, + apacheChildren, + apacheStat) intercept[KeeperException.ConnectionLoss] { Await.result(nodeChildren) } @@ -625,8 +669,10 @@ class ApacheZooKeeperTest .thenThrow(new IllegalArgumentException) val nodeChildren = zk.getChildren(path) - 
verify(mockZK) - .getChildren(meq(path), meq(null), childrenCB.capture, meq(null)) + verify(mockZK).getChildren(meq(path), + meq(null), + childrenCB.capture, + meq(null)) intercept[IllegalArgumentException] { Await.result(nodeChildren) @@ -637,11 +683,16 @@ class ApacheZooKeeperTest "getChildrenWatch" should "submit properly constructed getChildren" in { val nodeChildren = zk.getChildrenWatch(path) - verify(mockZK) - .getChildren(meq(path), watcher.capture, childrenCB.capture, meq(null)) + verify(mockZK).getChildren(meq(path), + watcher.capture, + childrenCB.capture, + meq(null)) - childrenCB.getValue - .processResult(apacheOk, path, null, apacheChildren, apacheStat) + childrenCB.getValue.processResult(apacheOk, + path, + null, + apacheChildren, + apacheStat) assert(Await.result(nodeChildren).value == children) assert(statsReceiver.counter("watch_successes")() == 1) } @@ -649,11 +700,16 @@ class ApacheZooKeeperTest "getChildrenWatch" should "handle ZK error" in { val nodeChildren = zk.getChildrenWatch(path) - verify(mockZK) - .getChildren(meq(path), watcher.capture, childrenCB.capture, meq(null)) + verify(mockZK).getChildren(meq(path), + watcher.capture, + childrenCB.capture, + meq(null)) - childrenCB.getValue - .processResult(apacheConnLoss, path, null, apacheChildren, apacheStat) + childrenCB.getValue.processResult(apacheConnLoss, + path, + null, + apacheChildren, + apacheStat) intercept[KeeperException.ConnectionLoss] { Await.result(nodeChildren) } @@ -661,13 +717,17 @@ class ApacheZooKeeperTest } "getChildrenWatch" should "handle synchronous error" in { - when(mockZK - .getChildren(meq(path), watcher.capture, childrenCB.capture, meq(null))) - .thenThrow(new IllegalArgumentException) + when( + mockZK.getChildren(meq(path), + watcher.capture, + childrenCB.capture, + meq(null))).thenThrow(new IllegalArgumentException) val nodeChildren = zk.getChildrenWatch(path) - verify(mockZK) - .getChildren(meq(path), watcher.capture, childrenCB.capture, meq(null)) + verify(mockZK).getChildren(meq(path), + watcher.capture, + childrenCB.capture, + meq(null)) intercept[IllegalArgumentException] { Await.result(nodeChildren) diff --git a/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/zookeeper/ZookeeperServerSetClusterTest.scala b/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/zookeeper/ZookeeperServerSetClusterTest.scala index 676386b71ff..b37e4e7fb9c 100644 --- a/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/zookeeper/ZookeeperServerSetClusterTest.scala +++ b/repos/finagle/finagle-serversets/src/test/scala/com/twitter/finagle/zookeeper/ZookeeperServerSetClusterTest.scala @@ -93,8 +93,9 @@ class ZookeeperServerSetClusterSpec extends FunSuite with MockitoSugar { cluster.join(localAddress, Map("alt" -> altLocalAddress)) - verify(serverSet) - .join(localAddress, Map("alt" -> altLocalAddress).asJava, Status.ALIVE) + verify(serverSet).join(localAddress, + Map("alt" -> altLocalAddress).asJava, + Status.ALIVE) } // CSL-2175 diff --git a/repos/finagle/finagle-spdy/src/main/scala/com/twitter/finagle/spdy/Codec.scala b/repos/finagle/finagle-spdy/src/main/scala/com/twitter/finagle/spdy/Codec.scala index 52ea09d9ff7..b9c8e1b8064 100644 --- a/repos/finagle/finagle-spdy/src/main/scala/com/twitter/finagle/spdy/Codec.scala +++ b/repos/finagle/finagle-spdy/src/main/scala/com/twitter/finagle/spdy/Codec.scala @@ -82,8 +82,8 @@ case class Spdy(_version: SpdyVersion = SpdyVersion.SPDY_3_1, underlying: ServiceFactory[HttpRequest, HttpResponse], params: 
Stack.Params ): ServiceFactory[HttpRequest, HttpResponse] = { - new GenerateSpdyStreamId andThen super - .prepareConnFactory(underlying, params) + new GenerateSpdyStreamId andThen super.prepareConnFactory(underlying, + params) } override def newClientTransport( @@ -118,8 +118,8 @@ case class Spdy(_version: SpdyVersion = SpdyVersion.SPDY_3_1, underlying: ServiceFactory[HttpRequest, HttpResponse], params: Stack.Params ): ServiceFactory[HttpRequest, HttpResponse] = { - new AnnotateSpdyStreamId andThen super - .prepareConnFactory(underlying, params) + new AnnotateSpdyStreamId andThen super.prepareConnFactory(underlying, + params) } override def newServerDispatcher( diff --git a/repos/finagle/finagle-stats/src/main/scala/com/twitter/finagle/stats/MetricsBucketedHistogram.scala b/repos/finagle/finagle-stats/src/main/scala/com/twitter/finagle/stats/MetricsBucketedHistogram.scala index 18ed941dff5..8238a8dfe06 100644 --- a/repos/finagle/finagle-stats/src/main/scala/com/twitter/finagle/stats/MetricsBucketedHistogram.scala +++ b/repos/finagle/finagle-stats/src/main/scala/com/twitter/finagle/stats/MetricsBucketedHistogram.scala @@ -50,8 +50,8 @@ private[stats] class MetricsBucketedHistogram( // requests for the snapshot will return values from the previous `latchPeriod`. if (Time.Undefined eq nextSnapAfter.get) { - nextSnapAfter - .compareAndSet(Time.Undefined, JsonExporter.startOfNextMinute) + nextSnapAfter.compareAndSet(Time.Undefined, + JsonExporter.startOfNextMinute) } current.synchronized { diff --git a/repos/finagle/finagle-stats/src/main/scala/com/twitter/finagle/stats/MetricsStatsReceiver.scala b/repos/finagle/finagle-stats/src/main/scala/com/twitter/finagle/stats/MetricsStatsReceiver.scala index 9ba225dae2f..e9be6a4dda5 100644 --- a/repos/finagle/finagle-stats/src/main/scala/com/twitter/finagle/stats/MetricsStatsReceiver.scala +++ b/repos/finagle/finagle-stats/src/main/scala/com/twitter/finagle/stats/MetricsStatsReceiver.scala @@ -152,8 +152,11 @@ object MetricsStatsReceiver { case Event(etype, when, delta, name: String, _, tid, sid) if etype eq this => val (t, s) = serializeTrace(tid, sid) - val env = Json - .Envelope(id, when.inMilliseconds, t, s, StatAddData(name, delta)) + val env = Json.Envelope(id, + when.inMilliseconds, + t, + s, + StatAddData(name, delta)) Try(Buf.Utf8(Json.serialize(env))) case _ => diff --git a/repos/finagle/finagle-stats/src/test/scala/com/twitter/finagle/stats/JsonExporterTest.scala b/repos/finagle/finagle-stats/src/test/scala/com/twitter/finagle/stats/JsonExporterTest.scala index 8ec9bf7d90d..025555a06c9 100644 --- a/repos/finagle/finagle-stats/src/test/scala/com/twitter/finagle/stats/JsonExporterTest.scala +++ b/repos/finagle/finagle-stats/src/test/scala/com/twitter/finagle/stats/JsonExporterTest.scala @@ -27,8 +27,9 @@ class JsonExporterTest def assertParam(r: Request, expected: Boolean, default: Boolean): Unit = withClue(s"params=${r.params}") { assert( - expected == exporter - .readBooleanParam(new RequestParamMap(r), "hi", default)) + expected == exporter.readBooleanParam(new RequestParamMap(r), + "hi", + default)) } // param doesn't exist so uses default diff --git a/repos/finagle/finagle-thrift/src/main/scala/com/twitter/finagle/rich.scala b/repos/finagle/finagle-thrift/src/main/scala/com/twitter/finagle/rich.scala index 98a4a10185c..bdfd11b98fa 100644 --- a/repos/finagle/finagle-thrift/src/main/scala/com/twitter/finagle/rich.scala +++ b/repos/finagle/finagle-thrift/src/main/scala/com/twitter/finagle/rich.scala @@ -115,8 +115,11 @@ private[twitter] object 
ThriftUtil { clientCls, scrooge3FinagleClientWithRepClassifierParamTypes: _*) } yield - cons - .newInstance(underlying, protocolFactory, "", sr, responseClassifier) + cons.newInstance(underlying, + protocolFactory, + "", + sr, + responseClassifier) def tryScrooge3FinagledClient: Option[Iface] = for { diff --git a/repos/finagle/finagle-thrift/src/main/scala/com/twitter/finagle/thrift/TTwitterClientFilter.scala b/repos/finagle/finagle-thrift/src/main/scala/com/twitter/finagle/thrift/TTwitterClientFilter.scala index 67cb2678a8c..348f62e5982 100644 --- a/repos/finagle/finagle-thrift/src/main/scala/com/twitter/finagle/thrift/TTwitterClientFilter.scala +++ b/repos/finagle/finagle-thrift/src/main/scala/com/twitter/finagle/thrift/TTwitterClientFilter.scala @@ -126,8 +126,9 @@ private[thrift] class TTwitterClientFilter(serviceName: String, reply map { response => if (isUpgraded) { // Peel off the ResponseHeader. - InputBuffer - .peelMessage(response, new thrift.ResponseHeader, protocolFactory) + InputBuffer.peelMessage(response, + new thrift.ResponseHeader, + protocolFactory) } else response } } diff --git a/repos/finagle/finagle-thriftmux/src/main/scala/com/twitter/finagle/ThriftMux.scala b/repos/finagle/finagle-thriftmux/src/main/scala/com/twitter/finagle/ThriftMux.scala index 81967d1c7a8..51baf3dd8f0 100644 --- a/repos/finagle/finagle-thriftmux/src/main/scala/com/twitter/finagle/ThriftMux.scala +++ b/repos/finagle/finagle-thriftmux/src/main/scala/com/twitter/finagle/ThriftMux.scala @@ -399,8 +399,10 @@ object ThriftMux case e @ RetryPolicy.RetryableWriteException(_) => Future.exception(e) case e if !e.isInstanceOf[TException] => - val msg = UncaughtAppExceptionFilter - .writeExceptionMessage(request.body, e, protocolFactory) + val msg = + UncaughtAppExceptionFilter.writeExceptionMessage(request.body, + e, + protocolFactory) Future.value(mux.Response(msg)) } } @@ -482,8 +484,8 @@ object ThriftMux addr: SocketAddress, factory: ServiceFactory[Array[Byte], Array[Byte]] ): ListeningServer = { - muxer - .serve(addr, MuxToArrayFilter.andThen(tracingFilter).andThen(factory)) + muxer.serve(addr, + MuxToArrayFilter.andThen(tracingFilter).andThen(factory)) } // Java-friendly forwarders diff --git a/repos/finagle/finagle-thriftmux/src/main/scala/com/twitter/finagle/thriftmux/Netty3.scala b/repos/finagle/finagle-thriftmux/src/main/scala/com/twitter/finagle/thriftmux/Netty3.scala index 716310caf03..23707260d45 100644 --- a/repos/finagle/finagle-thriftmux/src/main/scala/com/twitter/finagle/thriftmux/Netty3.scala +++ b/repos/finagle/finagle-thriftmux/src/main/scala/com/twitter/finagle/thriftmux/Netty3.scala @@ -316,8 +316,10 @@ private[finagle] class PipelineFactory( new RequestSerializer(1)) if (isTTwitterUpNegotiation(buf)) { pipeline.replace(this, "twitter_thrift_to_mux", new TTwitterToMux) - Channels - .write(ctx, e.getFuture, upNegotiationAck, e.getRemoteAddress) + Channels.write(ctx, + e.getFuture, + upNegotiationAck, + e.getRemoteAddress) } else { pipeline.replace(this, "framed_thrift_to_mux", new TFramedToMux) super.messageReceived( diff --git a/repos/framework/core/json/src/test/scala/net/liftweb/json/ParserBugs.scala b/repos/framework/core/json/src/test/scala/net/liftweb/json/ParserBugs.scala index a80458d0540..51f2cbd39a2 100644 --- a/repos/framework/core/json/src/test/scala/net/liftweb/json/ParserBugs.scala +++ b/repos/framework/core/json/src/test/scala/net/liftweb/json/ParserBugs.scala @@ -40,8 +40,8 @@ object ParserBugs extends Specification { } "Solo quote mark should fail cleanly (not 
StringIndexOutOfBoundsException) (1041)" in { - JsonParser - .parse("\"", discardParser) must throwA[JsonParser.ParseException].like { + JsonParser.parse("\"", discardParser) must throwA[ + JsonParser.ParseException].like { case e => e.getMessage must startWith("unexpected eof") } } diff --git a/repos/framework/core/markdown/src/main/scala/net/liftweb/markdown/LineTokenizer.scala b/repos/framework/core/markdown/src/main/scala/net/liftweb/markdown/LineTokenizer.scala index 7dcf8ab61a8..ce416a06641 100644 --- a/repos/framework/core/markdown/src/main/scala/net/liftweb/markdown/LineTokenizer.scala +++ b/repos/framework/core/markdown/src/main/scala/net/liftweb/markdown/LineTokenizer.scala @@ -114,8 +114,7 @@ class LineTokenizer() extends Parsers { if (in.atEnd) { Success(lds.toLinkDefinition(None), in) } else { - lineParsers - .parseAll(lineParsers.linkDefinitionTitle, in.first) match { + lineParsers.parseAll(lineParsers.linkDefinitionTitle, in.first) match { case lineParsers.Success(title, _) => Success(lds.toLinkDefinition(Some(title)), in.rest) case _ => Success(lds.toLinkDefinition(None), in) diff --git a/repos/framework/persistence/db/src/main/scala/net/liftweb/db/DB.scala b/repos/framework/persistence/db/src/main/scala/net/liftweb/db/DB.scala index 4b72cb31561..910d47468b5 100644 --- a/repos/framework/persistence/db/src/main/scala/net/liftweb/db/DB.scala +++ b/repos/framework/persistence/db/src/main/scala/net/liftweb/db/DB.scala @@ -1229,9 +1229,10 @@ class StandardDBVendor(driverName: String, (dbUser, dbPassword) match { case (Full(user), Full(pwd)) => tryo { t: Throwable => - logger.error("Unable to get database connection. url=%s, user=%s" - .format(dbUrl, user), - t) + logger.error( + "Unable to get database connection. url=%s, user=%s".format(dbUrl, + user), + t) }(DriverManager.getConnection(dbUrl, user, pwd)) case _ => tryo { t: Throwable => diff --git a/repos/framework/persistence/db/src/main/scala/net/liftweb/db/LoggingStatementWrappers.scala b/repos/framework/persistence/db/src/main/scala/net/liftweb/db/LoggingStatementWrappers.scala index caa717a2d3a..87d3d4f3ff2 100644 --- a/repos/framework/persistence/db/src/main/scala/net/liftweb/db/LoggingStatementWrappers.scala +++ b/repos/framework/persistence/db/src/main/scala/net/liftweb/db/LoggingStatementWrappers.scala @@ -441,8 +441,8 @@ object DBLog { protected def chain(method: Method, args: Array[Object]): Object = try { - val m = representative - .getMethod(method.getName, method.getParameterTypes: _*) + val m = representative.getMethod(method.getName, + method.getParameterTypes: _*) m.invoke(underlying, args: _*) } catch { @@ -596,8 +596,9 @@ object DBLog { case "setBlob" => { paramMap += - args(0).asInstanceOf[Int] -> "(Blob : %s (%d bytes))" - .format(args(1), args(2)) + args(0).asInstanceOf[Int] -> "(Blob : %s (%d bytes))".format( + args(1), + args(2)) chain(method, args) } @@ -622,8 +623,9 @@ object DBLog { case "setClob" => { paramMap += - args(0).asInstanceOf[Int] -> "(Clob : %s (%d bytes))" - .format(args(1), args(2)) + args(0).asInstanceOf[Int] -> "(Clob : %s (%d bytes))".format( + args(1), + args(2)) chain(method, args) } @@ -658,8 +660,9 @@ object DBLog { case "setNClob" => { paramMap += - args(0).asInstanceOf[Int] -> "(NClob : %s (%d bytes))" - .format(args(1), args(2)) + args(0).asInstanceOf[Int] -> "(NClob : %s (%d bytes))".format( + args(1), + args(2)) chain(method, args) } diff --git a/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/HasManyThrough.scala 
b/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/HasManyThrough.scala
index 0bf7b066a62..d3da6c15327 100644
--- a/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/HasManyThrough.scala
+++ b/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/HasManyThrough.scala
@@ -50,8 +50,10 @@ class HasManyThrough[From <: KeyedMapper[ThroughType, From],
       else st.setObject(1, indVal.jdbcFriendly, indVal.targetSQLType)
       DB.exec(st) { rs =>
-        otherSingleton
-          .createInstances(owner.connectionIdentifier, rs, Empty, Empty)
+        otherSingleton.createInstances(owner.connectionIdentifier,
+                                       rs,
+                                       Empty,
+                                       Empty)
       }
     } openOr Nil
   }
diff --git a/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/OneToMany.scala b/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/OneToMany.scala
index fa4b85332a6..39bb1ee0f08 100644
--- a/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/OneToMany.scala
+++ b/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/OneToMany.scala
@@ -247,8 +247,9 @@ trait OneToMany[K, T <: KeyedMapper[K, T]] extends KeyedMapper[K, T] {
     override def toString = {
       val c = getClass.getSimpleName
       val l = c.lastIndexOf("$")
-      c.substring(c.lastIndexOf("$", l - 1) + 1, l) + delegate
-        .mkString("[", ", ", "]")
+      c.substring(c.lastIndexOf("$", l - 1) + 1, l) + delegate.mkString("[",
+                                                                        ", ",
+                                                                        "]")
     }
   }
diff --git a/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/view/TableEditor.scala b/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/view/TableEditor.scala
index ff5cd8d7cf0..8b426de4241 100644
--- a/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/view/TableEditor.scala
+++ b/repos/framework/persistence/mapper/src/main/scala/net/liftweb/mapper/view/TableEditor.scala
@@ -328,8 +328,10 @@ trait ItemsListEditor[T <: Mapper[T]] {
       ".title *" #> title &
       ".insertBtn" #> SHtml.submit(?("Insert"), onInsert _, noPrompt) &
       ".item" #>
-        (bindRegularItems ++ bindRemovedItems) & ".saveBtn" #> SHtml
-        .submit(?("Save"), onSubmit _, noPrompt)
+        (bindRegularItems ++ bindRemovedItems) & ".saveBtn" #> SHtml.submit(
+        ?("Save"),
+        onSubmit _,
+        noPrompt)
     }
   }
 }
diff --git a/repos/framework/persistence/mapper/src/test/scala/net/liftweb/mapper/DBProviders.scala b/repos/framework/persistence/mapper/src/test/scala/net/liftweb/mapper/DBProviders.scala
index 9daf71825cc..a234bd1c26c 100644
--- a/repos/framework/persistence/mapper/src/test/scala/net/liftweb/mapper/DBProviders.scala
+++ b/repos/framework/persistence/mapper/src/test/scala/net/liftweb/mapper/DBProviders.scala
@@ -74,8 +74,10 @@ object DbProviders {
   def deleteAllTables {
     DB.use(DefaultConnectionIdentifier) { conn =>
       val md = conn.getMetaData
-      val rs = md
-        .getTables(null, Schemifier.getDefaultSchemaName(conn), null, null)
+      val rs = md.getTables(null,
+                            Schemifier.getDefaultSchemaName(conn),
+                            null,
+                            null)
       var toDelete: List[String] = Nil
       while (rs.next) {
         val tableName = rs.getString(3)
@@ -123,8 +125,9 @@ object DbProviders {
     def name = "PostgreSql"
     def vendor = new Vendor("org.postgresql.Driver") {
       def mkConn =
-        DriverManager
-          .getConnection("jdbc:postgresql://localhost/lift", "lift", "lift")
+        DriverManager.getConnection("jdbc:postgresql://localhost/lift",
+                                    "lift",
+                                    "lift")
     }
     def propName: String = "psql_local"
   }
diff --git a/repos/framework/persistence/proto/src/main/scala/net/liftweb/proto/ProtoUser.scala b/repos/framework/persistence/proto/src/main/scala/net/liftweb/proto/ProtoUser.scala
index e7298e3ef31..190582ca7bd 100644
--- a/repos/framework/persistence/proto/src/main/scala/net/liftweb/proto/ProtoUser.scala
+++ b/repos/framework/persistence/proto/src/main/scala/net/liftweb/proto/ProtoUser.scala
@@ -1002,8 +1002,7 @@ trait ProtoUser {
   def lostPassword = {
     val bind =
-      ".email" #> SHtml
-        .text("", sendPasswordReset _) & "type=submit" #> lostPasswordSubmitButton(
+      ".email" #> SHtml.text("", sendPasswordReset _) & "type=submit" #> lostPasswordSubmitButton(
         S.?("send.it"))
     bind(lostPasswordXhtml)
@@ -1093,8 +1092,7 @@ trait ProtoUser {
     val passwordInput =
       SHtml.password_*("", LFuncHolder(s => newPassword = s))
-    ".old-password" #> SHtml
-      .password("", s => oldPassword = s) & ".new-password" #> passwordInput & "type=submit" #> changePasswordSubmitButton(
+    ".old-password" #> SHtml.password("", s => oldPassword = s) & ".new-password" #> passwordInput & "type=submit" #> changePasswordSubmitButton(
       S.?("change"),
       testAndSet _)
   }
diff --git a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftScreen.scala b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftScreen.scala
index ef7ef8c4b16..a91a29ea671 100644
--- a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftScreen.scala
+++ b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftScreen.scala
@@ -1225,11 +1225,11 @@ trait ScreenWizardRendered extends Loggable {
     def defaultFields: List[CssBindFunc] =
       for ((bindingInfo, field) <- bindingInfoWithFields(Default))
         yield
-          traceInline(
-            "Binding default field %s to %s"
-              .format(bindingInfo.selector(formName), defaultFieldNodeSeq),
-            bindingInfo.selector(formName) #> bindField(field)(
-              defaultFieldNodeSeq))
+          traceInline("Binding default field %s to %s".format(
+                        bindingInfo.selector(formName),
+                        defaultFieldNodeSeq),
+                      bindingInfo.selector(formName) #> bindField(field)(
+                        defaultFieldNodeSeq))
     def customFields: List[CssBindFunc] =
       for {
diff --git a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftServlet.scala b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftServlet.scala
index b9de73c8d41..0f6805858ee 100644
--- a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftServlet.scala
+++ b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftServlet.scala
@@ -537,8 +537,9 @@ class LiftServlet extends Loggable {
       }
     } finally {
       if (S.functionMap.size > 0) {
-        liftSession
-          .updateFunctionMap(S.functionMap, S.renderVersion, millis)
+        liftSession.updateFunctionMap(S.functionMap,
+                                      S.renderVersion,
+                                      millis)
         S.clearFunctionMap
       }
       liftSession.notices = S.getNotices
@@ -692,8 +693,9 @@ class LiftServlet extends Loggable {
         Full(ret)
       } finally {
         if (S.functionMap.size > 0) {
-          liftSession
-            .updateFunctionMap(S.functionMap, RenderVersion.get, millis)
+          liftSession.updateFunctionMap(S.functionMap,
+                                        RenderVersion.get,
+                                        millis)
           S.clearFunctionMap
         }
       }
diff --git a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftSession.scala b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftSession.scala
index d26c04ebdce..808e0e8efe0 100644
--- a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftSession.scala
+++ b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/LiftSession.scala
@@ -2643,8 +2643,9 @@ class LiftSession(private[http] val _contextPath: String,
     comet.setCometActorLocale(S.locale)
     asyncSync.synchronized {
-      nasyncComponents
-        .put(CometId(creationInfo.cometType, creationInfo.cometName), comet)
+      nasyncComponents.put(CometId(creationInfo.cometType,
+                                   creationInfo.cometName),
+                           comet)
       nasyncById.put(comet.uniqueId, comet)
     }
@@ -2826,8 +2827,9 @@ class LiftSession(private[http] val _contextPath: String,
       }
     }
-    nasyncComponents
-      .put(CometId(ca.theType openOr "Roundtrip Comet Actor", ca.name), ca)
+    nasyncComponents.put(
+      CometId(ca.theType openOr "Roundtrip Comet Actor", ca.name),
+      ca)
     nasyncById.put(ca.uniqueId, ca)
     ca.callInitCometActor(
diff --git a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/MVCHelper.scala b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/MVCHelper.scala
index 1bbb8b7196e..7570815e006 100644
--- a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/MVCHelper.scala
+++ b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/MVCHelper.scala
@@ -150,8 +150,10 @@ trait MVCHelper extends LiftRules.DispatchPF {
       session <- S.session
       req <- S.request
       template <- templateForPath(req)
-      resp <- session
-        .processTemplate(Full(bind(template)), req, req.path, 200)
+      resp <- session.processTemplate(Full(bind(template)),
+                                      req,
+                                      req.path,
+                                      200)
     } yield resp
   }
diff --git a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/S.scala b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/S.scala
index 07dbfa17b97..b201da6ea7c 100644
--- a/repos/framework/web/webkit/src/main/scala/net/liftweb/http/S.scala
+++ b/repos/framework/web/webkit/src/main/scala/net/liftweb/http/S.scala
@@ -883,8 +883,10 @@ trait S extends HasParams with Loggable with UserAgentCalculator {
   ): Box[LiftCometActor] = {
     for {
       session <- session ?~ "Comet lookup and creation requires a session."
-      cometActor <- session
-        .findOrCreateComet(cometType, cometName, cometHtml, cometAttributes)
+      cometActor <- session.findOrCreateComet(cometType,
+                                              cometName,
+                                              cometHtml,
+                                              cometAttributes)
     } yield {
       if (receiveUpdatesOnPage) addComet(cometActor)
diff --git a/repos/framework/web/webkit/src/main/scala/net/liftweb/javascript/JavaScriptContext.scala b/repos/framework/web/webkit/src/main/scala/net/liftweb/javascript/JavaScriptContext.scala
index ba15ddee840..1ea0683ba56 100644
--- a/repos/framework/web/webkit/src/main/scala/net/liftweb/javascript/JavaScriptContext.scala
+++ b/repos/framework/web/webkit/src/main/scala/net/liftweb/javascript/JavaScriptContext.scala
@@ -40,8 +40,7 @@ object JavaScriptContext {
       case x :: Nil => (PassThru, Full(x))
       case x :: "it" :: Nil => session.buildXformer(x, Nil) -> Empty
       case x :: str :: Nil if str.startsWith("it.") =>
-        session
-          .buildXformer(x, str.roboSplit("\\.").filter(_ != "it")) -> Empty
+        session.buildXformer(x, str.roboSplit("\\.").filter(_ != "it")) -> Empty
       case x :: xs => session.buildXformer(x, Nil) -> Full(xs.mkString)
       case _ => (PassThru, Full(value))
     }
diff --git a/repos/framework/web/webkit/src/test/scala/net/liftweb/http/SecurityRulesSpec.scala b/repos/framework/web/webkit/src/test/scala/net/liftweb/http/SecurityRulesSpec.scala
index 3f0b09ace4a..1b508948a9c 100644
--- a/repos/framework/web/webkit/src/test/scala/net/liftweb/http/SecurityRulesSpec.scala
+++ b/repos/framework/web/webkit/src/test/scala/net/liftweb/http/SecurityRulesSpec.scala
@@ -119,8 +119,7 @@ class ContentSecurityPolicySpec extends Specification {
     }
     "provide no headers with enforcement and logging disabled" in {
-      ContentSecurityPolicy()
-        .headers(enforce = false, logViolations = false) must be empty
+      ContentSecurityPolicy().headers(enforce = false, logViolations = false) must be empty
     }
     "correctly generate restriction strings for the various restriction types" in {
diff --git a/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/JettyTestServer.scala b/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/JettyTestServer.scala
index af48dffaccd..17ed2b580cf 100644
--- a/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/JettyTestServer.scala
+++ b/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/JettyTestServer.scala
@@ -36,8 +36,8 @@ final class JettyTestServer(baseUrlBox: Box[URL]) {
     val context = new WebAppContext()
     context.setServer(server)
     context.setContextPath("/")
-    val dir = System
-      .getProperty("net.liftweb.webapptest.src.test.webapp", "src/test/webapp")
+    val dir = System.getProperty("net.liftweb.webapptest.src.test.webapp",
+                                 "src/test/webapp")
     context.setWar(dir)
     //val context = new Context(_server, "/", Context.SESSIONS)
     //context.addFilter(new FilterHolder(new LiftFilter()), "/");
diff --git a/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/OneShot.scala b/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/OneShot.scala
index c1b1e1ddf1b..44840a85790 100644
--- a/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/OneShot.scala
+++ b/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/OneShot.scala
@@ -41,8 +41,8 @@ object OneShot extends Specification with RequestKit with XmlMatchers {
     }
   }
-  private val host_ = System
-    .getProperty("net.liftweb.webapptest.oneshot.host", reachableLocalAddress)
+  private val host_ = System.getProperty("net.liftweb.webapptest.oneshot.host",
+                                         reachableLocalAddress)
   private val port_ =
     System.getProperty("net.liftweb.webapptest.oneshot.port", "8181").toInt
diff --git a/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/ToHeadUsages.scala b/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/ToHeadUsages.scala
index 9875b2dbaca..f2c7418bf07 100644
--- a/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/ToHeadUsages.scala
+++ b/repos/framework/web/webkit/src/test/scala/net/liftweb/webapptest/ToHeadUsages.scala
@@ -41,8 +41,8 @@ object ToHeadUsages extends Specification {
     }
   }
-  private val host_ = System
-    .getProperty("net.liftweb.webapptest.oneshot.host", reachableLocalAddress)
+  private val host_ = System.getProperty("net.liftweb.webapptest.oneshot.host",
+                                         reachableLocalAddress)
   private val port_ = System
     .getProperty("net.liftweb.webapptest.toheadusages.port", "8282")
     .toInt
diff --git a/repos/gitbucket/src/main/scala/ScalatraBootstrap.scala b/repos/gitbucket/src/main/scala/ScalatraBootstrap.scala
index 188a803afb2..7832140e1c7 100644
--- a/repos/gitbucket/src/main/scala/ScalatraBootstrap.scala
+++ b/repos/gitbucket/src/main/scala/ScalatraBootstrap.scala
@@ -22,8 +22,8 @@ class ScalatraBootstrap extends LifeCycle {
       .addMappingForUrlPatterns(EnumSet.allOf(classOf[DispatcherType]),
                                 true,
                                 "/*")
-    context
-      .addFilter("basicAuthenticationFilter", new BasicAuthenticationFilter)
+    context.addFilter("basicAuthenticationFilter",
+                      new BasicAuthenticationFilter)
     context
       .getFilterRegistration("basicAuthenticationFilter")
       .addMappingForUrlPatterns(EnumSet.allOf(classOf[DispatcherType]),
diff --git a/repos/gitbucket/src/main/scala/gitbucket/core/controller/DashboardController.scala b/repos/gitbucket/src/main/scala/gitbucket/core/controller/DashboardController.scala
index 334c6d5e35d..70657835bab 100644
--- a/repos/gitbucket/src/main/scala/gitbucket/core/controller/DashboardController.scala
+++ b/repos/gitbucket/src/main/scala/gitbucket/core/controller/DashboardController.scala
@@ -113,14 +113,17 @@ trait DashboardControllerBase extends ControllerBase {
     filter match {
       case "assigned" =>
-        condition
-          .copy(assigned = Some(userName), author = None, mentioned = None)
+        condition.copy(assigned = Some(userName),
+                       author = None,
+                       mentioned = None)
       case "mentioned" =>
-        condition
-          .copy(assigned = None, author = None, mentioned = Some(userName))
+        condition.copy(assigned = None,
+                       author = None,
+                       mentioned = Some(userName))
       case _ =>
-        condition
-          .copy(assigned = None, author = Some(userName), mentioned = None)
+        condition.copy(assigned = None,
+                       author = Some(userName),
+                       mentioned = None)
     }
   }
diff --git a/repos/gitbucket/src/main/scala/gitbucket/core/service/ProtectedBranchService.scala b/repos/gitbucket/src/main/scala/gitbucket/core/service/ProtectedBranchService.scala
index 2a70303fbdc..8b62bd3b92c 100644
--- a/repos/gitbucket/src/main/scala/gitbucket/core/service/ProtectedBranchService.scala
+++ b/repos/gitbucket/src/main/scala/gitbucket/core/service/ProtectedBranchService.scala
@@ -85,8 +85,10 @@ object ProtectedBranchService {
       pusher: String)(implicit session: Session): Option[String] = {
     val branch = command.getRefName.stripPrefix("refs/heads/")
     if (branch != command.getRefName) {
-      getProtectedBranchInfo(owner, repository, branch)
-        .getStopReason(receivePack.isAllowNonFastForwards, command, pusher)
+      getProtectedBranchInfo(owner, repository, branch).getStopReason(
+        receivePack.isAllowNonFastForwards,
+        command,
+        pusher)
     } else {
       None
     }
diff --git a/repos/gitbucket/src/main/scala/gitbucket/core/service/WebHookService.scala b/repos/gitbucket/src/main/scala/gitbucket/core/service/WebHookService.scala
index 68422c0bbf8..15f504b2165 100644
--- a/repos/gitbucket/src/main/scala/gitbucket/core/service/WebHookService.scala
+++ b/repos/gitbucket/src/main/scala/gitbucket/core/service/WebHookService.scala
@@ -454,8 +454,9 @@ object WebHookService {
       commits = commits.map { commit =>
         ApiCommit.forPushPayload(git, RepositoryName(repositoryInfo), commit)
       },
-      repository = ApiRepository
-        .forPushPayload(repositoryInfo, owner = ApiUser(repositoryOwner))
+      repository = ApiRepository.forPushPayload(repositoryInfo,
+                                                owner =
+                                                  ApiUser(repositoryOwner))
     )
   }
diff --git a/repos/gitbucket/src/main/scala/gitbucket/core/service/WikiService.scala b/repos/gitbucket/src/main/scala/gitbucket/core/service/WikiService.scala
index a52e570bf93..e8b9c97c222 100644
--- a/repos/gitbucket/src/main/scala/gitbucket/core/service/WikiService.scala
+++ b/repos/gitbucket/src/main/scala/gitbucket/core/service/WikiService.scala
@@ -48,13 +48,13 @@ object WikiService {
   def wikiHttpUrl(repositoryInfo: RepositoryInfo)(
       implicit context: Context): String =
-    RepositoryService
-      .httpUrl(repositoryInfo.owner, repositoryInfo.name + ".wiki")
+    RepositoryService.httpUrl(repositoryInfo.owner,
+                              repositoryInfo.name + ".wiki")
   def wikiSshUrl(repositoryInfo: RepositoryInfo)(
      implicit context: Context): Option[String] =
-    RepositoryService
-      .sshUrl(repositoryInfo.owner, repositoryInfo.name + ".wiki")
+    RepositoryService.sshUrl(repositoryInfo.owner,
+                             repositoryInfo.name + ".wiki")
 }
 trait WikiService {
diff --git a/repos/gitbucket/src/main/scala/gitbucket/core/servlet/BasicAuthenticationFilter.scala b/repos/gitbucket/src/main/scala/gitbucket/core/servlet/BasicAuthenticationFilter.scala
index da87394050a..abb1a22eda0 100644
--- a/repos/gitbucket/src/main/scala/gitbucket/core/servlet/BasicAuthenticationFilter.scala
+++ b/repos/gitbucket/src/main/scala/gitbucket/core/servlet/BasicAuthenticationFilter.scala
@@ -131,8 +131,8 @@ class BasicAuthenticationFilter
           if (hasWritePermission(repository.owner,
                                  repository.name,
                                  Some(account))) {
-            request
-              .setAttribute(Keys.Request.UserName, account.userName)
+            request.setAttribute(Keys.Request.UserName,
+                                 account.userName)
             true
           } else false
         } else true
diff --git a/repos/gitbucket/src/main/scala/gitbucket/core/servlet/InitializeListener.scala b/repos/gitbucket/src/main/scala/gitbucket/core/servlet/InitializeListener.scala
index 02181dad7ce..96b45fb9130 100644
--- a/repos/gitbucket/src/main/scala/gitbucket/core/servlet/InitializeListener.scala
+++ b/repos/gitbucket/src/main/scala/gitbucket/core/servlet/InitializeListener.scala
@@ -46,8 +46,9 @@ class InitializeListener
       // Load plugins
       logger.debug("Initialize plugins")
-      PluginRegistry
-        .initialize(event.getServletContext, loadSystemSettings(), conn)
+      PluginRegistry.initialize(event.getServletContext,
+                                loadSystemSettings(),
+                                conn)
     }
     // Start Quartz scheduler
diff --git a/repos/gitbucket/src/main/scala/gitbucket/core/ssh/GitCommand.scala b/repos/gitbucket/src/main/scala/gitbucket/core/ssh/GitCommand.scala
index 4a5ce7a4ee2..cabcd86158e 100644
--- a/repos/gitbucket/src/main/scala/gitbucket/core/ssh/GitCommand.scala
+++ b/repos/gitbucket/src/main/scala/gitbucket/core/ssh/GitCommand.scala
@@ -151,8 +151,10 @@ class PluginGitUploadPack(repoName: String, routing: GitRepositoryRouting)
   override protected def runTask(user: String)(
       implicit session: Session): Unit = {
-    if (routing.filter
-          .filter("/" + repoName, Some(user), loadSystemSettings(), false)) {
+    if (routing.filter.filter("/" + repoName,
+                              Some(user),
+                              loadSystemSettings(),
+                              false)) {
       val path =
         routing.urlPattern.r.replaceFirstIn(repoName, routing.localPath)
       using(Git.open(new File(Directory.GitBucketHome, path))) { git =>
@@ -170,8 +172,10 @@ class PluginGitReceivePack(repoName: String, routing: GitRepositoryRouting)
   override protected def runTask(user: String)(
       implicit session: Session): Unit = {
-    if (routing.filter
-          .filter("/" + repoName, Some(user), loadSystemSettings(), true)) {
+    if (routing.filter.filter("/" + repoName,
+                              Some(user),
+                              loadSystemSettings(),
+                              true)) {
       val path =
         routing.urlPattern.r.replaceFirstIn(repoName, routing.localPath)
       using(Git.open(new File(Directory.GitBucketHome, path))) { git =>
diff --git a/repos/gitbucket/src/test/scala/gitbucket/core/service/LabelsServiceSpec.scala b/repos/gitbucket/src/test/scala/gitbucket/core/service/LabelsServiceSpec.scala
index 45db5a329e4..1345ba9e81c 100644
--- a/repos/gitbucket/src/test/scala/gitbucket/core/service/LabelsServiceSpec.scala
+++ b/repos/gitbucket/src/test/scala/gitbucket/core/service/LabelsServiceSpec.scala
@@ -118,8 +118,11 @@ class LabelsServiceSpec extends FunSpec with ServiceSpecBase {
       dummyService.createLabel("user1", "repo1", "label1", "000000")
       dummyService.createLabel("user1", "repo2", "label1", "000000")
      dummyService.createLabel("user2", "repo1", "label1", "000000")
-      dummyService
-        .updateLabel("user1", "repo1", labelId, "updated-label", "ffffff")
+      dummyService.updateLabel("user1",
+                               "repo1",
+                               labelId,
+                               "updated-label",
+                               "ffffff")
       def getLabel = dummyService.getLabel("user1", "repo1", labelId)
       assert(
         getLabel == Some(
diff --git a/repos/gitbucket/src/test/scala/gitbucket/core/service/ServiceSpecBase.scala b/repos/gitbucket/src/test/scala/gitbucket/core/service/ServiceSpecBase.scala
index 8b8032b77df..c6b2b715387 100644
--- a/repos/gitbucket/src/test/scala/gitbucket/core/service/ServiceSpecBase.scala
+++ b/repos/gitbucket/src/test/scala/gitbucket/core/service/ServiceSpecBase.scala
@@ -35,8 +35,12 @@ trait ServiceSpecBase {
   }
   def generateNewAccount(name: String)(implicit s: Session): Account = {
-    AccountService
-      .createAccount(name, name, name, s"${name}@example.com", false, None)
+    AccountService.createAccount(name,
+                                 name,
+                                 name,
+                                 s"${name}@example.com",
+                                 false,
+                                 None)
     user(name)
   }
diff --git a/repos/goose/src/main/scala/com/gravity/goose/images/ImageSaver.scala b/repos/goose/src/main/scala/com/gravity/goose/images/ImageSaver.scala
index f3efd1fbcf3..29e7ca9ee93 100644
--- a/repos/goose/src/main/scala/com/gravity/goose/images/ImageSaver.scala
+++ b/repos/goose/src/main/scala/com/gravity/goose/images/ImageSaver.scala
@@ -85,8 +85,8 @@ object ImageSaver extends Logging {
       imageSrc: String): Option[HttpEntity] = {
     val localContext: HttpContext = new BasicHttpContext
-    localContext
-      .setAttribute(ClientContext.COOKIE_STORE, HtmlFetcher.emptyCookieStore)
+    localContext.setAttribute(ClientContext.COOKIE_STORE,
+                              HtmlFetcher.emptyCookieStore)
     val httpget = new HttpGet(imageSrc)
     val response = httpClient.execute(httpget, localContext)
     val respStatus: String = response.getStatusLine.toString
diff --git a/repos/goose/src/main/scala/com/gravity/goose/images/StandardImageExtractor.scala b/repos/goose/src/main/scala/com/gravity/goose/images/StandardImageExtractor.scala
index 58c2523f2be..3cd4790b978 100644
--- a/repos/goose/src/main/scala/com/gravity/goose/images/StandardImageExtractor.scala
+++ b/repos/goose/src/main/scala/com/gravity/goose/images/StandardImageExtractor.scala
@@ -490,8 +490,8 @@ class StandardImageExtractor(httpClient: HttpClient,
     var link: String = this.buildImagePath(src)
     link = link.replace(" ", "%20")
     val localContext: HttpContext = new BasicHttpContext
-    localContext
-      .setAttribute(ClientContext.COOKIE_STORE, HtmlFetcher.emptyCookieStore)
+    localContext.setAttribute(ClientContext.COOKIE_STORE,
+                              HtmlFetcher.emptyCookieStore)
     httpget = new HttpGet(link)
     var response: HttpResponse = null
     response = httpClient.execute(httpget, localContext)
@@ -553,8 +553,10 @@ class StandardImageExtractor(httpClient: HttpClient,
     }
     try {
       val imageSource: String = this.buildImagePath(image.attr("src"))
-      val localSrcPath: String = ImageSaver
-        .storeTempImage(this.httpClient, this.linkhash, imageSource, config)
+      val localSrcPath: String = ImageSaver.storeTempImage(this.httpClient,
+                                                           this.linkhash,
+                                                           imageSource,
+                                                           config)
       if (localSrcPath == null) {
         if (logger.isDebugEnabled) {
           logger.debug(
@@ -571,8 +573,9 @@ class StandardImageExtractor(httpClient: HttpClient,
       if (continueVar) {
         image.attr("tempImagePath", localSrcPath)
         try {
-          var imageDims: ImageDetails = ImageUtils
-            .getImageDimensions(config.imagemagickIdentifyPath, localSrcPath)
+          var imageDims: ImageDetails = ImageUtils.getImageDimensions(
+            config.imagemagickIdentifyPath,
+            localSrcPath)
           width = imageDims.getWidth
           height = imageDims.getHeight
           if (depthLevel > 1) {
diff --git a/repos/goose/src/main/scala/com/gravity/goose/network/HtmlFetcher.scala b/repos/goose/src/main/scala/com/gravity/goose/network/HtmlFetcher.scala
index 2986f77915f..d554f61343a 100644
--- a/repos/goose/src/main/scala/com/gravity/goose/network/HtmlFetcher.scala
+++ b/repos/goose/src/main/scala/com/gravity/goose/network/HtmlFetcher.scala
@@ -107,15 +107,15 @@ object HtmlFetcher extends AbstractHtmlFetcher with Logging {
     try {
       val localContext: HttpContext = new BasicHttpContext
-      localContext
-        .setAttribute(ClientContext.COOKIE_STORE, HtmlFetcher.emptyCookieStore)
+      localContext.setAttribute(ClientContext.COOKIE_STORE,
+                                HtmlFetcher.emptyCookieStore)
       httpget = new HttpGet(cleanUrl)
-      HttpProtocolParams
-        .setUserAgent(httpClient.getParams, config.getBrowserUserAgent());
+      HttpProtocolParams.setUserAgent(httpClient.getParams,
+                                      config.getBrowserUserAgent());
       val params = httpClient.getParams
-      HttpConnectionParams
-        .setConnectionTimeout(params, config.getConnectionTimeout())
+      HttpConnectionParams.setConnectionTimeout(params,
+                                                config.getConnectionTimeout())
       HttpConnectionParams.setSoTimeout(params, config.getSocketTimeout())
       trace(
@@ -123,8 +123,9 @@ object HtmlFetcher extends AbstractHtmlFetcher with Logging {
         HttpProtocolParams.getUserAgent(httpClient.getParams))
       val response: HttpResponse = httpClient.execute(httpget, localContext)
-      HttpStatusValidator
-        .validate(cleanUrl, response.getStatusLine.getStatusCode) match {
+      HttpStatusValidator.validate(
+        cleanUrl,
+        response.getStatusLine.getStatusCode) match {
         case Left(ex) => throw ex
         case _ =>
       }
diff --git a/repos/goose/src/main/scala/com/gravity/goose/network/HttpExceptions.scala b/repos/goose/src/main/scala/com/gravity/goose/network/HttpExceptions.scala
index 09e824fb2d1..b286f166a79 100644
--- a/repos/goose/src/main/scala/com/gravity/goose/network/HttpExceptions.scala
+++ b/repos/goose/src/main/scala/com/gravity/goose/network/HttpExceptions.scala
@@ -11,8 +11,9 @@ class LoggableException(msg: String, innerEx: Exception = null)
   override lazy val getMessage = {
     val innerMessage =
       if (innerEx != null) {
-        "%n\tand inner Exception of type %s:%n\t\tmessage: %s"
-          .format(innerEx.getClass.getName, innerEx.getMessage)
+        "%n\tand inner Exception of type %s:%n\t\tmessage: %s".format(
+          innerEx.getClass.getName,
+          innerEx.getMessage)
       } else {
         ""
       }
diff --git a/repos/goose/src/main/scala/com/gravity/goose/text/StopWords.scala b/repos/goose/src/main/scala/com/gravity/goose/text/StopWords.scala
index 2ed5fb7da10..2c2861655d2 100644
--- a/repos/goose/src/main/scala/com/gravity/goose/text/StopWords.scala
+++ b/repos/goose/src/main/scala/com/gravity/goose/text/StopWords.scala
@@ -28,8 +28,9 @@ import com.gravity.goose.utils.FileHelper
 object StopWords {
   // the confusing pattern below is basically just match any non-word character excluding white-space.
-  private val PUNCTUATION: StringReplacement = StringReplacement
-    .compile("[^\\p{Ll}\\p{Lu}\\p{Lt}\\p{Lo}\\p{Nd}\\p{Pc}\\s]", string.empty)
+  private val PUNCTUATION: StringReplacement = StringReplacement.compile(
+    "[^\\p{Ll}\\p{Lu}\\p{Lt}\\p{Lo}\\p{Nd}\\p{Pc}\\s]",
+    string.empty)
   val STOP_WORDS = FileHelper
     .loadResourceFile("stopwords-en.txt", StopWords.getClass)
diff --git a/repos/goose/src/test/scala/com/gravity/goose/ExtractionsTest.scala b/repos/goose/src/test/scala/com/gravity/goose/ExtractionsTest.scala
index c85f21e0d3e..4c391c95b95 100644
--- a/repos/goose/src/test/scala/com/gravity/goose/ExtractionsTest.scala
+++ b/repos/goose/src/test/scala/com/gravity/goose/ExtractionsTest.scala
@@ -17,8 +17,8 @@ import java.util.Date
 class ExtractionsTest {
   def getHtml(filename: String): String = {
-    FileHelper
-      .loadResourceFile(TestUtils.staticHtmlDir + filename, Goose.getClass)
+    FileHelper.loadResourceFile(TestUtils.staticHtmlDir + filename,
+                                Goose.getClass)
   }
   @Test
@@ -247,11 +247,9 @@ class ExtractionsTest {
     val url: String =
       "http://www.engadget.com/2010/08/18/verizon-fios-set-top-boxes-getting-a-new-hd-guide-external-stor/"
     val article = TestUtils.getArticle(url, html)
-    TestUtils
-      .runArticleAssertions(
-        article = article,
-        expectedStart =
-          "Streaming and downloading TV content to mobiles is nice")
+    TestUtils.runArticleAssertions(
+      article = article,
+      expectedStart = "Streaming and downloading TV content to mobiles is nice")
   }
   @Test
diff --git a/repos/goose/src/test/scala/com/gravity/goose/GoldSitesTestIT.scala b/repos/goose/src/test/scala/com/gravity/goose/GoldSitesTestIT.scala
index c1aa0f79220..79e95804747 100644
--- a/repos/goose/src/test/scala/com/gravity/goose/GoldSitesTestIT.scala
+++ b/repos/goose/src/test/scala/com/gravity/goose/GoldSitesTestIT.scala
@@ -318,13 +318,11 @@ class GoldSitesTestIT {
     val url: String =
       "http://www.engadget.com/2010/08/18/verizon-fios-set-top-boxes-getting-a-new-hd-guide-external-stor/"
     val article = TestUtils.getArticle(url)
-    TestUtils
-      .runArticleAssertions(
-        article = article,
-        expectedStart =
-          "Streaming and downloading TV content to mobiles is nice",
-        expectedImage =
-          "http://www.blogcdn.com/www.engadget.com/media/2010/08/44ni600.jpg")
+    TestUtils.runArticleAssertions(
+      article = article,
+      expectedStart = "Streaming and downloading TV content to mobiles is nice",
+      expectedImage =
+        "http://www.blogcdn.com/www.engadget.com/media/2010/08/44ni600.jpg")
   }
   @Test
@@ -645,12 +643,10 @@ class GoldSitesTestIT {
       "http://finance.yahoo.com/news/Stocks-plunge-after-Fed-apf-3386772167.html?x=0"
     implicit val config = TestUtils.DEFAULT_CONFIG
     val article = TestUtils.getArticle(url)
-    TestUtils
-      .runArticleAssertions(
-        article = article,
-        expectedStart =
-          "NEW YORK (AP) -- The Federal Reserve did what investors",
-        expectedImage = "http://l.yimg.com/a/p/fi/41/20/44.jpg")
+    TestUtils.runArticleAssertions(
+      article = article,
+      expectedStart = "NEW YORK (AP) -- The Federal Reserve did what investors",
+      expectedImage = "http://l.yimg.com/a/p/fi/41/20/44.jpg")
     TestUtils.printReport()
   }
diff --git a/repos/intellij-scala/compiler-settings/src/org/jetbrains/jps/incremental/scala/remote/RemoteResourceOwner.scala b/repos/intellij-scala/compiler-settings/src/org/jetbrains/jps/incremental/scala/remote/RemoteResourceOwner.scala
index 8f88dafa487..37c5eee9312 100644
--- a/repos/intellij-scala/compiler-settings/src/org/jetbrains/jps/incremental/scala/remote/RemoteResourceOwner.scala
+++ b/repos/intellij-scala/compiler-settings/src/org/jetbrains/jps/incremental/scala/remote/RemoteResourceOwner.scala
@@ -55,8 +55,8 @@ trait RemoteResourceOwner {
             val s = new String(data)
             if (s.length > 50) s.substring(0, 50) + "..." else s
           }
-          client
-            .message(Kind.ERROR, "Unable to read an event from: " + chars)
+          client.message(Kind.ERROR,
+                         "Unable to read an event from: " + chars)
           client.trace(e)
       }
       // Main server class redirects all (unexpected) stdout data to stderr.
diff --git a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/IdeaIncrementalBuilder.scala b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/IdeaIncrementalBuilder.scala
index 87ded12a356..32a65686314 100644
--- a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/IdeaIncrementalBuilder.scala
+++ b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/IdeaIncrementalBuilder.scala
@@ -201,10 +201,11 @@ class IdeaIncrementalBuilder(category: BuilderCategory)
         .getTempTargetRoots(target, context)
         .asScala
     } {
-      FileUtil
-        .processFilesRecursively(tempRoot.getRootFile, new Processor[File] {
-          def process(file: File) = checkAndCollectFile(file)
-        })
+      FileUtil.processFilesRecursively(tempRoot.getRootFile,
+                                       new Processor[File] {
+                                         def process(file: File) =
+                                           checkAndCollectFile(file)
+                                       })
     }
     //if no scala files to compile, return empty seq
diff --git a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/SbtBuilder.scala b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/SbtBuilder.scala
index 8712f67f0d6..0851b87ce5c 100644
--- a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/SbtBuilder.scala
+++ b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/SbtBuilder.scala
@@ -66,8 +66,8 @@ class SbtBuilder extends ModuleLevelBuilder(BuilderCategory.TRANSLATOR) {
     if (filesToCompile.isEmpty) return ExitCode.NOTHING_DONE
     // Delete dirty class files (to handle force builds and form changes)
-    BuildOperations
-      .cleanOutputsCorrespondingToChangedFiles(context, dirtyFilesHolder)
+    BuildOperations.cleanOutputsCorrespondingToChangedFiles(context,
+                                                            dirtyFilesHolder)
     val sources = filesToCompile.keySet.toSeq
diff --git a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/CompilerFactoryImpl.scala b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/CompilerFactoryImpl.scala
index b8bf32e8a76..250e7973484 100644
--- a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/CompilerFactoryImpl.scala
+++ b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/CompilerFactoryImpl.scala
@@ -45,8 +45,9 @@ class CompilerFactoryImpl(sbtData: SbtData) extends CompilerFactory {
                                 Seq.empty,
                                 None))
       val classpathOptions = ClasspathOptions.javac(compiler = false)
-      AggressiveCompile
-        .directOrFork(scala, classpathOptions, compilerData.javaHome)
+      AggressiveCompile.directOrFork(scala,
+                                     classpathOptions,
+                                     compilerData.javaHome)
     }
     new SbtCompiler(javac, scalac, fileToStore)
diff --git a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/IdeClient.scala b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/IdeClient.scala
index ac2376f85a6..8f6935cfe81 100644
--- a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/IdeClient.scala
+++ b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/IdeClient.scala
@@ -72,8 +72,9 @@ abstract class IdeClient(compilerName: String,
       else {
         val decapitalizedText =
           text.charAt(0).toLower.toString + text.substring(1)
-        "%s: %s [%s]"
-          .format(compilerName, decapitalizedText, modules.mkString(", "))
+        "%s: %s [%s]".format(compilerName,
+                             decapitalizedText,
+                             modules.mkString(", "))
       }
     context.processMessage(
       new ProgressMessage(formattedText, done.getOrElse(-1.0F)))
diff --git a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/IdeClientIdea.scala b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/IdeClientIdea.scala
index 0ae71e0a0ad..86adc40d167 100644
--- a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/IdeClientIdea.scala
+++ b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/IdeClientIdea.scala
@@ -47,8 +47,8 @@ class IdeClientIdea(compilerName: String,
     if (!isTemp) {
       try {
         if (isClassFile)
-          consumer
-            .registerCompiledClass(rootDescriptor.target, compiledClass)
+          consumer.registerCompiledClass(rootDescriptor.target,
+                                         compiledClass)
         else
           consumer.registerOutputFile(
             rootDescriptor.target,
diff --git a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/LocalServer.scala b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/LocalServer.scala
index e0204bc7aa2..2d31d6fe104 100644
--- a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/LocalServer.scala
+++ b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/LocalServer.scala
@@ -22,8 +22,9 @@ class LocalServer extends Server {
       val compilerFactory = compilerFactoryFrom(sbtData)
       client.progress("Instantiating compiler...")
-      compilerFactory
-        .createCompiler(compilerData, client, LocalServer.createAnalysisStore)
+      compilerFactory.createCompiler(compilerData,
+                                     client,
+                                     LocalServer.createAnalysisStore)
     }
     if (!client.isCanceled) {
diff --git a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/Main.scala b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/Main.scala
index 447f0ac4990..8063a7ba325 100644
--- a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/Main.scala
+++ b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/Main.scala
@@ -77,8 +77,10 @@ object Main {
       Arguments.from(strings)
     }
-    Server
-      .compile(args.sbtData, args.compilerData, args.compilationData, client)
+    Server.compile(args.sbtData,
+                   args.compilerData,
+                   args.compilationData,
+                   client)
     if (!hasErrors)
       worksheetFactory.getRunner(out, standalone).loadAndRun(args, client)
diff --git a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/RemoteServer.scala b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/RemoteServer.scala
index c5c34576390..2258b90d0e2 100644
--- a/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/RemoteServer.scala
+++ b/repos/intellij-scala/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/RemoteServer.scala
@@ -35,8 +35,10 @@ class RemoteServer(val address: InetAddress, val port: Int)
         client.warning(message)
         client.debug(
           s"$firstLine\n${e.toString}\n${e.getStackTrace.mkString("\n")}")
-        ScalaBuilder.localServer
-          .compile(sbtData, compilerData, compilationData, client)
+        ScalaBuilder.localServer.compile(sbtData,
+                                         compilerData,
+                                         compilationData,
+                                         client)
       case e: UnknownHostException =>
         val message =
           "Unknown IP address of compile server host: " + address.toString
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/hocon/codestyle/HoconLanguageCodeStyleSettingsProvider.scala b/repos/intellij-scala/src/org/jetbrains/plugins/hocon/codestyle/HoconLanguageCodeStyleSettingsProvider.scala
index e921e93abcc..fb3bea767a8 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/hocon/codestyle/HoconLanguageCodeStyleSettingsProvider.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/hocon/codestyle/HoconLanguageCodeStyleSettingsProvider.scala
@@ -49,8 +49,8 @@ class HoconLanguageCodeStyleSettingsProvider
         SPACE_AFTER_COMMA
       ).map(_.name): _*)
-    consumer
-      .renameStandardOption(SPACE_WITHIN_BRACES.name, "Object braces")
+    consumer.renameStandardOption(SPACE_WITHIN_BRACES.name,
+                                  "Object braces")
     consumer.renameStandardOption(
       SPACE_WITHIN_METHOD_CALL_PARENTHESES.name,
       "Include qualifier parentheses")
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/hocon/formatting/HoconFormatter.scala b/repos/intellij-scala/src/org/jetbrains/plugins/hocon/formatting/HoconFormatter.scala
index e26914a0659..43208e9c173 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/hocon/formatting/HoconFormatter.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/hocon/formatting/HoconFormatter.scala
@@ -190,8 +190,8 @@ class HoconFormatter(settings: CodeStyleSettings) {
         case Some(Colon) =>
           Wrap.createWrap(customSettings.OBJECT_FIELDS_WITH_COLON_WRAP, true)
         case Some(Equals | PlusEquals) =>
-          Wrap
-            .createWrap(customSettings.OBJECT_FIELDS_WITH_ASSIGNMENT_WRAP, true)
+          Wrap.createWrap(customSettings.OBJECT_FIELDS_WITH_ASSIGNMENT_WRAP,
+                          true)
         case _ => null
       }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/hocon/parser/HoconErrorHighlightingAnnotator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/hocon/parser/HoconErrorHighlightingAnnotator.scala
index a97ab1e65b1..fcbb2d50216 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/hocon/parser/HoconErrorHighlightingAnnotator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/hocon/parser/HoconErrorHighlightingAnnotator.scala
@@ -53,9 +53,9 @@ class HoconErrorHighlightingAnnotator extends Annotator {
             validateConcatenation(child.getElementType, child.getTreeNext)
           case (required, actual) =>
-            holder
-              .createErrorAnnotation(child, s"cannot concatenate ${uncaps(
-                required.toString)} with ${uncaps(actual.toString)}")
+            holder.createErrorAnnotation(
+              child,
+              s"cannot concatenate ${uncaps(required.toString)} with ${uncaps(actual.toString)}")
             validateConcatenation(actual, child.getTreeNext)
         }
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/GoToImplicitConversionAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/GoToImplicitConversionAction.scala
index 38dc0e423f0..c2fe9db3859 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/GoToImplicitConversionAction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/GoToImplicitConversionAction.scala
@@ -159,8 +159,11 @@ class GoToImplicitConversionAction
     if (editor.getSelectionModel.hasSelection) {
       val selectionStart = editor.getSelectionModel.getSelectionStart
       val selectionEnd = editor.getSelectionModel.getSelectionEnd
-      val opt = ScalaRefactoringUtil
-        .getExpression(project, editor, file, selectionStart, selectionEnd)
+      val opt = ScalaRefactoringUtil.getExpression(project,
+                                                   editor,
+                                                   file,
+                                                   selectionStart,
+                                                   selectionEnd)
       opt match {
         case Some((expr, _)) =>
           if (forExpr(expr)) return
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/NewScalaTypeDefinitionAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/NewScalaTypeDefinitionAction.scala
index 7146ba3e41e..995ae1864db 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/NewScalaTypeDefinitionAction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/NewScalaTypeDefinitionAction.scala
@@ -52,8 +52,9 @@ class NewScalaTypeDefinitionAction
                    .getInstance(project)
                    .getAllTemplates) {
       if (isScalaTemplate(template) && checkPackageExists(directory)) {
-        builder
-          .addKind(template.getName, Icons.FILE_TYPE_LOGO, template.getName)
+        builder.addKind(template.getName,
+                        Icons.FILE_TYPE_LOGO,
+                        template.getName)
       }
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ScalaActionUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ScalaActionUtil.scala
index 0c8039997f7..f84a0312cdb 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ScalaActionUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ScalaActionUtil.scala
@@ -60,8 +60,10 @@ object ScalaActionUtil {
       })
     val position = editor.getCaretModel.getLogicalPosition
-    val p: Point = HintManagerImpl
-      .getHintPosition(hint, editor, position, HintManager.ABOVE)
+    val p: Point = HintManagerImpl.getHintPosition(hint,
+                                                   editor,
+                                                   position,
+                                                   HintManager.ABOVE)
     hintManager.showEditorHint(
       hint,
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ScalaFileTemplateUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ScalaFileTemplateUtil.scala
index 457709cd257..96f8f13824c 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ScalaFileTemplateUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ScalaFileTemplateUtil.scala
@@ -26,8 +26,8 @@ object ScalaFileTemplateUtil {
     properties.setProperty(FileTemplate.ATTRIBUTE_CLASS_NAME, className)
     var classSimpleName: String = aClass.name
     if (classSimpleName == null) classSimpleName = ""
-    properties
-      .setProperty(FileTemplate.ATTRIBUTE_SIMPLE_CLASS_NAME, classSimpleName)
+    properties.setProperty(FileTemplate.ATTRIBUTE_SIMPLE_CLASS_NAME,
+                           classSimpleName)
     val methodName: String = method.name
     properties.setProperty(FileTemplate.ATTRIBUTE_METHOD_NAME, methodName)
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ShowImplicitParametersAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ShowImplicitParametersAction.scala
index 44ec7f71386..2137d740f38 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ShowImplicitParametersAction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ShowImplicitParametersAction.scala
@@ -146,8 +146,11 @@ class ShowImplicitParametersAction
     if (editor.getSelectionModel.hasSelection) {
      val selectionStart = editor.getSelectionModel.getSelectionStart
      val selectionEnd = editor.getSelectionModel.getSelectionEnd
-      val opt = ScalaRefactoringUtil
-        .getExpression(project, editor, file, selectionStart, selectionEnd)
+      val opt = ScalaRefactoringUtil.getExpression(project,
+                                                   editor,
+                                                   file,
+                                                   selectionStart,
+                                                   selectionEnd)
      opt match {
        case Some((expr, _)) =>
          forExpr(expr)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ShowTypeInfoAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ShowTypeInfoAction.scala
index 3474ea6879f..d279dd6dcb9 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ShowTypeInfoAction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/actions/ShowTypeInfoAction.scala
@@ -35,8 +35,9 @@ class ShowTypeInfoAction extends AnAction(ScalaBundle.message("type.info")) {
     val editor = CommonDataKeys.EDITOR.getData(context)
     if (editor == null) return
-    val file = PsiUtilBase
-      .getPsiFileInEditor(editor, CommonDataKeys.PROJECT.getData(context))
+    val file = PsiUtilBase.getPsiFileInEditor(
+      editor,
+      CommonDataKeys.PROJECT.getData(context))
     if (file.getLanguage != ScalaFileType.SCALA_LANGUAGE) return
     val selectionModel = editor.getSelectionModel
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/AnnotatorUtils.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/AnnotatorUtils.scala
index 4456fc1b742..bf683e6ce2e 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/AnnotatorUtils.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/AnnotatorUtils.scala
@@ -65,8 +65,9 @@ private[annotator] object AnnotatorUtils {
         ScTypePresentation.different(actual, expected)
       val annotation = holder.createErrorAnnotation(
         expr,
-        ScalaBundle
-          .message("type.mismatch.found.required", actualText, expText))
+        ScalaBundle.message("type.mismatch.found.required",
+                            actualText,
+                            expText))
       annotation.registerFix(ReportHighlightingErrorQuickFix)
     }
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/ApplicationAnnotator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/ApplicationAnnotator.scala
index ffd74df066c..413bff64c1e 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/ApplicationAnnotator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/ApplicationAnnotator.scala
@@ -273,8 +273,9 @@ trait ApplicationAnnotator {
                 //TODO show parameter name
                 val (expectedText, actualText) =
                   ScTypePresentation.different(expectedType, t)
-                val message = ScalaBundle
-                  .message("type.mismatch.expected.actual", expectedText, actualText)
+                val message = ScalaBundle.message("type.mismatch.expected.actual",
+                                                  expectedText,
+                                                  actualText)
                 val annotation = holder.createErrorAnnotation(expression, message)
                 annotation.registerFix(ReportHighlightingErrorQuickFix)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/AssignmentAnnotator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/AssignmentAnnotator.scala
index ce593f9cb2d..2d648817c91 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/AssignmentAnnotator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/AssignmentAnnotator.scala
@@ -102,8 +102,8 @@ trait AssignmentAnnotator {
                                           "Wrong right assignment side")
             }
           case _ =>
-            holder
-              .createErrorAnnotation(assignment, "Reassignment to val")
+            holder.createErrorAnnotation(assignment,
+                                         "Reassignment to val")
         }
       case f: ScFunction =>
         holder.createErrorAnnotation(assignment, "Reassignment to val")
@@ -112,12 +112,13 @@ trait AssignmentAnnotator {
             method.containingClass match {
              case c: PsiClass if c.isAnnotationType => //do nothing
              case _ =>
-                holder
-                  .createErrorAnnotation(assignment, "Reassignment to val")
+                holder.createErrorAnnotation(assignment,
+                                             "Reassignment to val")
            }
          case v: ScValue =>
-            val annotation = holder
-              .createErrorAnnotation(assignment, "Reassignment to val")
+            val annotation = holder.createErrorAnnotation(
+              assignment,
+              "Reassignment to val")
            annotation.registerFix(
              new ValToVarQuickFix(
                ScalaPsiUtil.nameContext(r.element).asInstanceOf[ScValue]))
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/FunctionAnnotator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/FunctionAnnotator.scala
index 95dc5072b8f..b4e4f404709 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/FunctionAnnotator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/FunctionAnnotator.scala
@@ -33,8 +33,9 @@ trait FunctionAnnotator {
                        typeAware: Boolean) {
     if (!function.hasExplicitType && !function.returnTypeIsDefined) {
       function.recursiveReferences.foreach { ref =>
-        val message = ScalaBundle
-          .message("function.recursive.need.result.type", function.name)
+        val message =
+          ScalaBundle.message("function.recursive.need.result.type",
+                              function.name)
         holder.createErrorAnnotation(ref.element, message)
       }
     }
@@ -110,8 +111,9 @@ trait FunctionAnnotator {
     }
     def needsTypeAnnotation() = {
-      val message = ScalaBundle
-        .message("function.must.define.type.explicitly", function.name)
+      val message = ScalaBundle.message(
+        "function.must.define.type.explicitly",
+        function.name)
      val returnTypes =
        function.returnUsages(withBooleanInfix = false).toSeq.collect {
          case retStmt: ScReturnStmt =>
@@ -126,8 +128,8 @@ trait FunctionAnnotator {
     }
     def redundantReturnExpression() = {
-      val message = ScalaBundle
-        .message("return.expression.is.redundant", usageType.presentableText)
+      val message = ScalaBundle.message("return.expression.is.redundant",
+                                        usageType.presentableText)
       holder.createWarningAnnotation(
         usage.asInstanceOf[ScReturnStmt].expr.get,
         message)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/OverridingAnnotator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/OverridingAnnotator.scala
index f0ae9d1d164..2e055338139 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/OverridingAnnotator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/OverridingAnnotator.scala
@@ -175,8 +175,9 @@ trait OverridingAnnotator {
     if (owner.hasModifierProperty("override")) {
       val annotation: Annotation = holder.createErrorAnnotation(
         member.nameId,
-        ScalaBundle
-          .message("member.overrides.nothing", memberType, member.name))
+        ScalaBundle.message("member.overrides.nothing",
+                            memberType,
+                            member.name))
       annotation.setHighlightType(
         ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
       annotation.registerFix(new RemoveModifierQuickFix(owner, "override"))
@@ -249,8 +250,9 @@ trait OverridingAnnotator {
     if (overridesFinal) {
       val annotation: Annotation = holder.createErrorAnnotation(
         member.nameId,
-        ScalaBundle
-          .message("can.not.override.final", memberType, member.name))
+        ScalaBundle.message("can.not.override.final",
+                            memberType,
+                            member.name))
       annotation.setHighlightType(
         ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/PatternAnnotator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/PatternAnnotator.scala
index 47f2dbd91a5..6d40e473169 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/PatternAnnotator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/PatternAnnotator.scala
@@ -142,8 +142,7 @@ object PatternAnnotator {
         val (exprTypeText, patTypeText) =
           ScTypePresentation.different(exprType, patType)
         val message =
-          ScalaBundle
-            .message("fruitless.type.test", exprTypeText, patTypeText) +
+          ScalaBundle.message("fruitless.type.test", exprTypeText, patTypeText) +
             erasureWarn
         holder.createWarningAnnotation(pattern, message)
       case StableIdResolvesToVar() =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/ScalaAnnotator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/ScalaAnnotator.scala
index 4bb4fd30387..30c61ce7d6c 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/ScalaAnnotator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/ScalaAnnotator.scala
@@ -630,8 +630,9 @@ class ScalaAnnotator
       case _ => return
     }
     val expectedType = Success(tp, None)
-    val conformance = ScalaAnnotator
-      .smartCheckConformance(expectedType, returnType)
+    val conformance = ScalaAnnotator.smartCheckConformance(
+      expectedType,
+      returnType)
     if (!conformance) {
       if (typeAware) {
         val (retTypeText, expectedTypeText) =
@@ -1224,8 +1225,8 @@ class ScalaAnnotator
       }
       val (exprTypeText, expectedTypeText) =
-        ScTypePresentation
-          .different(exprType.getOrNothing, expectedType.get)
+        ScTypePresentation.different(exprType.getOrNothing,
+                                     expectedType.get)
       val error = ScalaBundle.message(
         "expr.type.does.not.conform.expected.type",
         exprTypeText,
@@ -1287,8 +1288,7 @@ class ScalaAnnotator
   private def checkUnboundUnderscore(under: ScUnderscoreSection,
                                      holder: AnnotationHolder) {
     if (under.getText == "_") {
-      ScalaPsiUtil
-        .getParentOfType(under, classOf[ScVariableDefinition]) match {
+      ScalaPsiUtil.getParentOfType(under, classOf[ScVariableDefinition]) match {
        case varDef @ ScVariableDefinition.expr(expr)
            if varDef.expr.contains(under) =>
          if (varDef.containingClass == null) {
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateApplyOrUnapplyQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateApplyOrUnapplyQuickFix.scala
index fd63b9d0bf0..527fd607426 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateApplyOrUnapplyQuickFix.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateApplyOrUnapplyQuickFix.scala
@@ -93,8 +93,8 @@ abstract class CreateApplyOrUnapplyQuickFix(td: ScTypeDefinition)
       val newEditor = CreateFromUsageUtil.positionCursor(entity.getLastChild)
       val range = entity.getTextRange
-      newEditor.getDocument
-        .deleteString(range.getStartOffset, range.getEndOffset)
+      newEditor.getDocument.deleteString(range.getStartOffset,
+                                         range.getEndOffset)
       TemplateManager.getInstance(project).startTemplate(newEditor, template)
     }
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateEntityQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateEntityQuickFix.scala
index 41d96216446..5fe31a59257 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateEntityQuickFix.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateEntityQuickFix.scala
@@ -137,8 +137,8 @@ abstract class CreateEntityQuickFix(ref: ScReferenceExpression,
     if (!isScalaConsole) {
       val newEditor = positionCursor(entity.getLastChild)
       val range = entity.getTextRange
-      newEditor.getDocument
-        .deleteString(range.getStartOffset, range.getEndOffset)
+      newEditor.getDocument.deleteString(range.getStartOffset,
+                                         range.getEndOffset)
       TemplateManager.getInstance(project).startTemplate(newEditor, template)
     }
   }
@@ -149,8 +149,10 @@ object CreateEntityQuickFix {
   private def materializeSytheticObject(obj: ScObject): ScObject = {
     val clazz = obj.fakeCompanionClassOrCompanionClass
     val objText = s"object ${clazz.name} {}"
-    val fromText = ScalaPsiElementFactory
-      .createTemplateDefinitionFromText(objText, clazz.getParent, clazz)
+    val fromText = ScalaPsiElementFactory.createTemplateDefinitionFromText(
+      objText,
+      clazz.getParent,
+      clazz)
     clazz.getParent.addAfter(fromText, clazz).asInstanceOf[ScObject]
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateTypeDefinitionQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateTypeDefinitionQuickFix.scala
index b386102cacc..01206c58626 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateTypeDefinitionQuickFix.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateTypeDefinitionQuickFix.scala
@@ -107,8 +107,10 @@ abstract class CreateTypeDefinitionQuickFix(ref: ScReferenceElement,
                              parent)) return
     val text = s"${kind.keyword} $name"
-    val newTd = ScalaPsiElementFactory
-      .createTemplateDefinitionFromText(text, parent, parent.getFirstChild)
+    val newTd = ScalaPsiElementFactory.createTemplateDefinitionFromText(
+      text,
+      parent,
+      parent.getFirstChild)
     val anchor = anchorAfter.orNull
     parent.addBefore(ScalaPsiElementFactory.createNewLine(parent.getManager),
                      anchor)
@@ -245,8 +247,10 @@ abstract class CreateTypeDefinitionQuickFix(ref: ScReferenceElement,
       case cl: ScClass =>
        val constr = cl.constructor.get
        val text = parametersText(ref)
-       val parameters = ScalaPsiElementFactory
-         .createParamClausesWithContext(text, constr, constr.getFirstChild)
+       val parameters = ScalaPsiElementFactory.createParamClausesWithContext(
+         text,
+         constr,
+         constr.getFirstChild)
        constr.parameterList.replace(parameters)
      case _ =>
    }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/gutter/ScalaLineMarkerProvider.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/gutter/ScalaLineMarkerProvider.scala
index 31adaa03271..a9ad86ccd88 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/gutter/ScalaLineMarkerProvider.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/gutter/ScalaLineMarkerProvider.scala
@@ -290,8 +290,9 @@ private object GutterUtil {
     val overrides = new ArrayBuffer[PsiNamedElement]
     for (member <- members) overrides ++=
-      ScalaOverridingMemberSearcher
-        .search(member, deep = false, withSelfType = true)
+      ScalaOverridingMemberSearcher.search(member,
+                                           deep = false,
+                                           withSelfType = true)
     if (overrides.nonEmpty) {
       val icon =
         if (!GutterUtil.isAbstract(member)) OVERRIDEN_METHOD_MARKER_RENDERER
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/gutter/ScalaMarkerType.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/gutter/ScalaMarkerType.scala
index 40944fcc131..6cad281dbdb 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/gutter/ScalaMarkerType.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/gutter/ScalaMarkerType.scala
@@ -306,8 +306,8 @@ object ScalaMarkerType {
             presentation.getPresentableText + " " +
               presentation.getLocationString
           else {
-            ClassPresentationUtil
-              .getNameForClass(method.containingClass, false)
+            ClassPresentationUtil.getNameForClass(method.containingClass,
+                                                  false)
           }
         case xlass: PsiClass =>
           val presentation = xlass.getPresentation
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/AddLToLongLiteralFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/AddLToLongLiteralFix.scala
index 0ea3a90e4a3..43f254a7144 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/AddLToLongLiteralFix.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/AddLToLongLiteralFix.scala
@@ -21,8 +21,9 @@ class AddLToLongLiteralFix(literal: ScLiteral) extends IntentionAction {
   def invoke(project: Project, editor: Editor, file: PsiFile): Unit = {
     if (!literal.isValid) return
-    val psi = ScalaPsiElementFactory
-      .createExpressionFromText(literal.getText + "L", literal.getManager)
+    val psi = ScalaPsiElementFactory.createExpressionFromText(
+      literal.getText + "L",
+      literal.getManager)
     literal.replace(psi)
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ChangeTypeFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ChangeTypeFix.scala
index d1ce52a409d..e534b94dd05 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ChangeTypeFix.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ChangeTypeFix.scala
@@ -15,8 +15,9 @@ import org.jetbrains.plugins.scala.lang.psi.types.ScType
 class ChangeTypeFix(typeElement: ScTypeElement, newType: ScType)
     extends IntentionAction {
-  val getText: String = "Change type '%s' to '%s'"
-    .format(typeElement.getText, newType.presentableText)
+  val getText: String = "Change type '%s' to '%s'".format(
+    typeElement.getText,
+    newType.presentableText)
   def getFamilyName: String = "Change Type"
@@ -31,8 +32,9 @@ class ChangeTypeFix(typeElement: ScTypeElement, newType: ScType)
           typeElement.getContainingFile)) return
     if (typeElement.getParent == null ||
         typeElement.getParent.getNode == null) return
-    val newTypeElement = ScalaPsiElementFactory
-      .createTypeElementFromText(newType.canonicalText, typeElement.getManager)
+    val newTypeElement = ScalaPsiElementFactory.createTypeElementFromText(
+      newType.canonicalText,
+      typeElement.getManager)
     val replaced = typeElement.replace(newTypeElement)
     ScalaPsiUtil.adjustTypes(replaced)
     UndoUtil.markPsiFileForUndo(file)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ConvertOctalToHexFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ConvertOctalToHexFix.scala
index 0e66b54d669..c71e0e769a3 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ConvertOctalToHexFix.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ConvertOctalToHexFix.scala
@@ -36,8 +36,9 @@ class ConvertOctalToHexFix(literal: ScLiteral) extends IntentionAction {
     if (!literal.isValid) return
     val text = literal.getText
     if (!(text.length >= 2 && text(0) == '0' && text(1).toLower != 'x')) return
-    val psi = ScalaPsiElementFactory
-      .createExpressionFromText(convertOctalToHex(text), literal.getManager)
+    val psi = ScalaPsiElementFactory.createExpressionFromText(
+      convertOctalToHex(text),
+      literal.getManager)
     literal.replace(psi)
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ImplementMethodsQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ImplementMethodsQuickFix.scala
index b5b176c8d3e..91176adac99 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ImplementMethodsQuickFix.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/ImplementMethodsQuickFix.scala
@@ -20,8 +20,10 @@ class ImplementMethodsQuickFix(clazz: ScTemplateDefinition)
   def isAvailable(project: Project, editor: Editor, file: PsiFile): Boolean =
     clazz.isValid && clazz.getManager.isInProject(file) && file.isWritable
   def invoke(project: Project, editor: Editor, file: PsiFile) {
-    ScalaOIUtil
-      .invokeOverrideImplement(project, editor, file, isImplement = true)
+    ScalaOIUtil.invokeOverrideImplement(project,
+                                        editor,
+                                        file,
+                                        isImplement = true)
   }
   def getFamilyName: String = ScalaBundle.message("implement.methods.fix")
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/WrapInOptionQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/WrapInOptionQuickFix.scala
index 64bf37709df..33c63fe7a39 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/WrapInOptionQuickFix.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/WrapInOptionQuickFix.scala
@@ -30,8 +30,9 @@ class WrapInOptionQuickFix(expr: ScExpression,
   def invoke(project: Project, editor: Editor, file: PsiFile) {
     if (expr.isValid) {
       val newText = "Option(" + expr.getText + ")"
-      val newExpr = ScalaPsiElementFactory
-        .createExpressionFromText(newText, expr.getManager)
+      val newExpr = ScalaPsiElementFactory.createExpressionFromText(
+        newText,
+        expr.getManager)
       expr.replaceExpression(newExpr, removeParenthesis = true)
     }
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/modifiers/AddModifierWithValOrVarQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/modifiers/AddModifierWithValOrVarQuickFix.scala
index b4ce7adda9f..b3b179cfbf8 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/modifiers/AddModifierWithValOrVarQuickFix.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/quickfix/modifiers/AddModifierWithValOrVarQuickFix.scala
@@ -19,12 +19,16 @@ class AddModifierWithValOrVarQuickFix(member: ScModifierListOwner,
   override def invoke(project: Project, editor: Editor, file: PsiFile) = {
     val psiKeyword =
       if (addVal) {
-        val decl = ScalaPsiElementFactory
-          .createDeclarationFromText("val x", member.getParent, member)
+        val decl = ScalaPsiElementFactory.createDeclarationFromText(
+          "val x",
+          member.getParent,
+          member)
         decl.findFirstChildByType(ScalaTokenTypes.kVAL)
       } else {
-        val decl = ScalaPsiElementFactory
-          .createDeclarationFromText("var x", member.getParent, member)
+        val decl = ScalaPsiElementFactory.createDeclarationFromText(
+          "var x",
+          member.getParent,
+          member)
         decl.findFirstChildByType(ScalaTokenTypes.kVAR)
       }
     member.addAfter(psiKeyword, member.getModifierList)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/CaseClassWithoutParamList.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/CaseClassWithoutParamList.scala
index ee63142e470..7dbdf4cf541 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/CaseClassWithoutParamList.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/CaseClassWithoutParamList.scala
@@ -64,8 +64,10 @@ class ConvertToObjectFix(c: ScClass) extends IntentionAction {
     val charsToReplace = classKeywordTextRange.getLength
     val classText = c.getText
     val objectText = classText.patch(start, "object", charsToReplace)
-    val objectElement = ScalaPsiElementFactory
-      .createObjectWithContext(objectText, c.getContext, c)
+    val objectElement = ScalaPsiElementFactory.createObjectWithContext(
+      objectText,
+      c.getContext,
+      c)
     c.replace(objectElement)
     // TODO update references to class.
     // new X -> X
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/IllegalInheritance.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/IllegalInheritance.scala
index 576b1124a36..1472b17c349 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/IllegalInheritance.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/IllegalInheritance.scala
@@ -12,8 +12,9 @@ import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
 object IllegalInheritance extends AnnotatorPart[ScTemplateDefinition] {
   val Message =
-    "Illegal inheritance, self-type %s does not conform to %s"
-      .format(_: String, _: String)
+    "Illegal inheritance, self-type %s does not conform to %s".format(
+      _: String,
+      _: String)
   def kind = classOf[ScTemplateDefinition]
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/ObjectCreationImpossible.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/ObjectCreationImpossible.scala
index 971d0207635..f47716c03c9 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/ObjectCreationImpossible.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/annotator/template/ObjectCreationImpossible.scala
@@ -54,8 +54,9 @@ object ObjectCreationImpossible extends AnnotatorPart[ScTemplateDefinition] {
           val element =
             if (isNew) refElement
             else definition.asInstanceOf[ScObject].nameId
-          val annotation = holder
-            .createErrorAnnotation(element, message(undefined.toSeq: _*))
+          val annotation =
+            holder.createErrorAnnotation(element,
+                                         message(undefined.toSeq: _*))
           annotation.registerFix(new ImplementMethodsQuickFix(definition))
         }
       case _ =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/caches/CachesUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/caches/CachesUtil.scala
index 47e6a45ccfa..65cd78ec986 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/caches/CachesUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/caches/CachesUtil.scala
@@ -240,8 +240,10 @@ object CachesUtil {
             throw new ProbablyRecursionException(e, data, key, Set(fun))
           }
         } else {
-          guard
-            .doPreventingRecursion((e, data), false, new Computable[Result] {
+          guard.doPreventingRecursion(
+            (e, data),
+            false,
+            new Computable[Result] {
               def compute(): Result = {
                 try {
                   builder(e, data)
@@ -254,8 +256,9 @@ object CachesUtil {
           case t @ ProbablyRecursionException(ee, innerData, k, set)
               if k == key =>
             val fun =
-              PsiTreeUtil
-                .getContextOfType(e, true, classOf[ScFunction])
+              PsiTreeUtil.getContextOfType(e,
+                                           true,
+                                           classOf[ScFunction])
             if (fun == null || fun.isProbablyRecursive) throw t
             else {
               fun.setProbablyRecursive(true)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateCompanionObjectHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateCompanionObjectHandler.scala
index 6092f8c8f4c..8abf661284a 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateCompanionObjectHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateCompanionObjectHandler.scala
@@ -58,8 +58,9 @@ class ScalaGenerateCompanionObjectHandler
     if (canAddCompanionObject(clazz)) {
       val name = clazz.name
       val text = s"object $name {\n  \n}"
-      ScalaPsiElementFactory
-        .createObjectWithContext(text, clazz.getContext, clazz)
+      ScalaPsiElementFactory.createObjectWithContext(text,
+                                                     clazz.getContext,
+                                                     clazz)
     } else throw new IllegalArgumentException("Cannot create companion object")
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateEqualsHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateEqualsHandler.scala
index 795f5d1e9da..9b4d5864d3d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateEqualsHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateEqualsHandler.scala
@@ -126,8 +126,9 @@ class ScalaGenerateEqualsHandler extends LanguageCodeInsightActionHandler {
         |  $firstStmtText
         |  $calculationText
         |}""".stripMargin.replace("\r", "")
-    ScalaPsiElementFactory
-      .createMethodWithContext(methodText, aClass, aClass.extendsBlock)
+    ScalaPsiElementFactory.createMethodWithContext(methodText,
+                                                   aClass,
+                                                   aClass.extendsBlock)
   }
   protected def createCanEqual(aClass: ScClass, project: Project): ScFunction = {
@@ -139,8 +140,9 @@ class ScalaGenerateEqualsHandler extends LanguageCodeInsightActionHandler {
                                 ScSubstitutor.empty)
     val overrideMod = overrideModifier(aClass, sign)
     val text = s"$overrideMod $declText = other.isInstanceOf[${aClass.name}]"
-    ScalaPsiElementFactory
-      .createMethodWithContext(text, aClass, aClass.extendsBlock)
+    ScalaPsiElementFactory.createMethodWithContext(text,
+                                                   aClass,
+                                                   aClass.extendsBlock)
   }
   protected def createEquals(aClass: ScClass, project: Project): ScFunction = {
@@ -165,8 +167,9 @@ class ScalaGenerateEqualsHandler extends LanguageCodeInsightActionHandler {
         |  $checksText
         |  case _ $arrow false
         |}""".stripMargin.replace("\r", "")
-    ScalaPsiElementFactory
-      .createMethodWithContext(text, aClass, aClass.extendsBlock)
+    ScalaPsiElementFactory.createMethodWithContext(text,
+                                                   aClass,
+                                                   aClass.extendsBlock)
  }
  def invoke(project: Project, editor: Editor, file: PsiFile) {
@@ -222,8 +225,9 @@ class ScalaGenerateEqualsHandler extends LanguageCodeInsightActionHandler {
   }
   private def hasHashCode(aClass: ScClass): Option[ScFunction] = {
-    val method = ScalaPsiElementFactory
-      .createMethodFromText("def hashCode(): Int", aClass.getManager)
+    val method = ScalaPsiElementFactory.createMethodFromText(
+      "def hashCode(): Int",
+      aClass.getManager)
     findSuchMethod(aClass, "hashCode", method.methodType)
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGeneratePropertyHandler.scala
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGeneratePropertyHandler.scala index 798aa28e147..a6bf2801fe0 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGeneratePropertyHandler.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGeneratePropertyHandler.scala @@ -49,8 +49,9 @@ class ScalaGeneratePropertyHandler extends LanguageCodeInsightActionHandler { val modifiers = varDef.getModifierList.getText def createDefinition(text: String) = - ScalaPsiElementFactory - .createDefinitionWithContext(text, varDef.getContext, varDef) + ScalaPsiElementFactory.createDefinitionWithContext(text, + varDef.getContext, + varDef) val backingVarText = s"private[this] var _$name: $typeText = $defaultValue" val backingVar_0 = createDefinition(backingVarText) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateToStringHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateToStringHandler.scala index 67856e561ff..932334726cb 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateToStringHandler.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/generation/ScalaGenerateToStringHandler.scala @@ -41,8 +41,9 @@ class ScalaGenerateToStringHandler extends LanguageCodeInsightActionHandler { val toStringMethod = createToString(aType, project) extensions.inWriteAction { - GenerationUtil - .addMembers(aType, toStringMethod.toList, editor.getDocument) + GenerationUtil.addMembers(aType, + toStringMethod.toList, + editor.getDocument) } } } @@ -90,8 +91,9 @@ class ScalaGenerateToStringHandler extends LanguageCodeInsightActionHandler { val fieldsText = fieldsWtihNames.mkString(s"$typeName(", ", ", ")") val methodText = s"""override def toString = s"$fieldsText"""" - ScalaPsiElementFactory - .createMethodWithContext(methodText, aType, aType.extendsBlock) + ScalaPsiElementFactory.createMethodWithContext(methodText, + aType, + aType.extendsBlock) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/DeMorganLawIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/DeMorganLawIntention.scala index e9e82a1766d..671b61f4d07 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/DeMorganLawIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/DeMorganLawIntention.scala @@ -64,8 +64,9 @@ class DeMorganLawIntention extends PsiElementBaseIntentionAction { .append(" ") .append(IntentionUtils.negate(infixExpr.getArgExpr)) - val res = IntentionUtils - .negateAndValidateExpression(infixExpr, element.getManager, buf) + val res = IntentionUtils.negateAndValidateExpression(infixExpr, + element.getManager, + buf) inWriteAction { res._1.replaceExpression(res._2, true) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ExpandBooleanIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ExpandBooleanIntention.scala index 3dbfb2db07a..2d896673d45 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ExpandBooleanIntention.scala +++ 
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ExpandBooleanIntention.scala @@ -68,8 +68,8 @@ class ExpandBooleanIntention extends PsiElementBaseIntentionAction { expr.append("{ return true } else { return false }") val newReturnStmt: ScExpression = - ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + ScalaPsiElementFactory.createExpressionFromText(expr.toString(), + element.getManager) inWriteAction { returnStmt.replaceExpression(newReturnStmt, true) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/FlipComparisonInInfixExprIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/FlipComparisonInInfixExprIntention.scala index e80d25b4cc6..16fb635b2d3 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/FlipComparisonInInfixExprIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/FlipComparisonInInfixExprIntention.scala @@ -84,8 +84,9 @@ class FlipComparisonInInfixExprIntention .append(" ") .append(infixExpr.getBaseExpr.getText) - val newInfixExpr = ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + val newInfixExpr = ScalaPsiElementFactory.createExpressionFromText( + expr.toString(), + element.getManager) val size = newInfixExpr diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/FlipComparisonInMethodCallExprIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/FlipComparisonInMethodCallExprIntention.scala index 4efc69511c7..07b8d7ceef3 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/FlipComparisonInMethodCallExprIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/FlipComparisonInMethodCallExprIntention.scala @@ -114,8 +114,8 @@ class FlipComparisonInMethodCallExprIntention } val newQualExpr: ScExpression = - ScalaPsiElementFactory - .createExpressionFromText(newQual, element.getManager) + ScalaPsiElementFactory.createExpressionFromText(newQual, + element.getManager) expr .append(methodCallExpr.args.getText) @@ -128,8 +128,9 @@ class FlipComparisonInMethodCallExprIntention .getText)) .append(newArgs) - val newMethodCallExpr = ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + val newMethodCallExpr = ScalaPsiElementFactory.createExpressionFromText( + expr.toString(), + element.getManager) newMethodCallExpr .asInstanceOf[ScMethodCall] diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/NegateComparisonIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/NegateComparisonIntention.scala index 5e2cf5b5cd6..c2f59b4f250 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/NegateComparisonIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/NegateComparisonIntention.scala @@ -76,8 +76,9 @@ class NegateComparisonIntention extends PsiElementBaseIntentionAction { .append(" ") .append(infixExpr.getArgExpr.getText) - val res = IntentionUtils - .negateAndValidateExpression(infixExpr, element.getManager, buf) + val res = IntentionUtils.negateAndValidateExpression(infixExpr, + 
element.getManager, + buf) inWriteAction { res._1.replaceExpression(res._2, true) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ReplaceEqualsOrEqualityInInfixExprIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ReplaceEqualsOrEqualityInInfixExprIntention.scala index cc8a61d5b97..aaf4c3b67f5 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ReplaceEqualsOrEqualityInInfixExprIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ReplaceEqualsOrEqualityInInfixExprIntention.scala @@ -61,8 +61,9 @@ class ReplaceEqualsOrEqualityInInfixExprIntention .append(" ") .append(infixExpr.getArgExpr.getText) - val newInfixExpr = ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + val newInfixExpr = ScalaPsiElementFactory.createExpressionFromText( + expr.toString(), + element.getManager) val size = newInfixExpr diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ReplaceEqualsOrEqualityInMethodCallExprIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ReplaceEqualsOrEqualityInMethodCallExprIntention.scala index 9368dda4f74..fb25c5e6948 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ReplaceEqualsOrEqualityInMethodCallExprIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ReplaceEqualsOrEqualityInMethodCallExprIntention.scala @@ -86,8 +86,9 @@ class ReplaceEqualsOrEqualityInMethodCallExprIntention .append(replaceOper(oper)) .append(methodCallExpr.args.getText) - val newMethodCallExpr = ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + val newMethodCallExpr = ScalaPsiElementFactory.createExpressionFromText( + expr.toString(), + element.getManager) val size = newMethodCallExpr diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/comprehension/ConvertToCurlyBracesIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/comprehension/ConvertToCurlyBracesIntention.scala index 3229473a517..41432d1f139 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/comprehension/ConvertToCurlyBracesIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/comprehension/ConvertToCurlyBracesIntention.scala @@ -42,8 +42,8 @@ class ConvertToCurlyBracesIntention extends PsiElementBaseIntentionAction { for (rParen <- Option( statement.findFirstChildByType(ScalaTokenTypes.tRPARENTHESIS))) { val rBrace = rParen.replace(block.getLastChild) - statement - .addBefore(ScalaPsiElementFactory.createNewLine(manager), rBrace) + statement.addBefore(ScalaPsiElementFactory.createNewLine(manager), + rBrace) } for (enumerators <- statement.enumerators; diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/InvertIfConditionIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/InvertIfConditionIntention.scala index 5bae17f0d10..722c5932b33 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/InvertIfConditionIntention.scala +++ 
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/InvertIfConditionIntention.scala @@ -107,8 +107,8 @@ class InvertIfConditionIntention extends PsiElementBaseIntentionAction { } expr.append(res) val newStmt: ScExpression = - ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + ScalaPsiElementFactory.createExpressionFromText(expr.toString(), + element.getManager) inWriteAction { ifStmt.replaceExpression(newStmt, true) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeElseIfIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeElseIfIntention.scala index fa8c912ee45..34dacf17b43 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeElseIfIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeElseIfIntention.scala @@ -76,8 +76,8 @@ class MergeElseIfIntention extends PsiElementBaseIntentionAction { .append(ifStmt.elseBranch.get.getText.trim.drop(1).dropRight(1)) val newIfStmt: ScExpression = - ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + ScalaPsiElementFactory.createExpressionFromText(expr.toString(), + element.getManager) val size = newIfStmt .asInstanceOf[ScIfStmt] diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeIfToAndIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeIfToAndIntention.scala index f2967ed32dd..ee15b96e23e 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeIfToAndIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeIfToAndIntention.scala @@ -82,8 +82,8 @@ class MergeIfToAndIntention extends PsiElementBaseIntentionAction { .append(innerThenBranch.getText) val newIfStmt: ScExpression = - ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + ScalaPsiElementFactory.createExpressionFromText(expr.toString(), + element.getManager) inWriteAction { ifStmt.replaceExpression(newIfStmt, true) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeIfToOrIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeIfToOrIntention.scala index 632d08ceba2..1fe18bf5a97 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeIfToOrIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/MergeIfToOrIntention.scala @@ -66,8 +66,10 @@ class MergeIfToOrIntention extends PsiElementBaseIntentionAction { } } - PsiEquivalenceUtil - .areElementsEquivalent(thenBranch, innerThenBranch, comparator, false) + PsiEquivalenceUtil.areElementsEquivalent(thenBranch, + innerThenBranch, + comparator, + false) } override def invoke(project: Project, editor: Editor, element: PsiElement) { @@ -93,8 +95,8 @@ class MergeIfToOrIntention extends PsiElementBaseIntentionAction { expr.append(" else ").append(innerElseBranch.getText) val newIfStmt: ScExpression = - ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + ScalaPsiElementFactory.createExpressionFromText(expr.toString(), + 
element.getManager) inWriteAction { ifStmt.replaceExpression(newIfStmt, true) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/RemoveRedundantElseIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/RemoveRedundantElseIntention.scala index e2bcbd8da28..ba106714fbb 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/RemoveRedundantElseIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/RemoveRedundantElseIntention.scala @@ -80,8 +80,8 @@ class RemoveRedundantElseIntention extends PsiElementBaseIntentionAction { elseKeyWord.delete() elseBranch.delete() ifStmt.getParent.addRangeAfter(from, to, ifStmt) - ifStmt.getParent - .addAfter(ScalaPsiElementFactory.createNewLine(manager), ifStmt) + ifStmt.getParent.addAfter(ScalaPsiElementFactory.createNewLine(manager), + ifStmt) PsiDocumentManager .getInstance(project) .commitDocument(editor.getDocument) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/ReplaceDoWhileWithWhileIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/ReplaceDoWhileWithWhileIntention.scala index a9bc3a92fa2..2445d89ae60 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/ReplaceDoWhileWithWhileIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/ReplaceDoWhileWithWhileIntention.scala @@ -105,8 +105,9 @@ class ReplaceDoWhileWithWhileIntention extends PsiElementBaseIntentionAction { val manager = element.getManager - val newWhileStmt = ScalaPsiElementFactory - .createExpressionFromText(whileText.toString, manager) + val newWhileStmt = ScalaPsiElementFactory.createExpressionFromText( + whileText.toString, + manager) val newBody = ScalaPsiElementFactory.createExpressionFromText(bodyText, manager) @@ -130,8 +131,9 @@ class ReplaceDoWhileWithWhileIntention extends PsiElementBaseIntentionAction { val doStmtInBraces = doStmt.replaceExpression( ScalaPsiElementFactory.createBlockFromExpr(doStmt, manager), removeParenthesis = true) - PsiTreeUtil - .findChildOfType(doStmtInBraces, classOf[ScDoStmt], true) + PsiTreeUtil.findChildOfType(doStmtInBraces, + classOf[ScDoStmt], + true) } else doStmt val newExpression: ScExpression = newDoStmt.replaceExpression(newWhileStmt, removeParenthesis = true) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/ReplaceWhileWithDoWhileIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/ReplaceWhileWithDoWhileIntention.scala index 203d227c82e..93430ec84b7 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/ReplaceWhileWithDoWhileIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/ReplaceWhileWithDoWhileIntention.scala @@ -58,8 +58,8 @@ class ReplaceWhileWithDoWhileIntention extends PsiElementBaseIntentionAction { val newStmtText = s"if ($condText) {\n do $bodyText while ($condText)\n}" val newStmt: ScExpression = - ScalaPsiElementFactory - .createExpressionFromText(newStmtText, element.getManager) + ScalaPsiElementFactory.createExpressionFromText(newStmtText, + element.getManager) inWriteAction { whileStmt.replaceExpression(newStmt, 
removeParenthesis = true) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/SplitElseIfIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/SplitElseIfIntention.scala index 7a72d0115b7..41aa09cded2 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/SplitElseIfIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/SplitElseIfIntention.scala @@ -74,8 +74,8 @@ class SplitElseIfIntention extends PsiElementBaseIntentionAction { .append("\n}") val newIfStmt: ScExpression = - ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + ScalaPsiElementFactory.createExpressionFromText(expr.toString(), + element.getManager) val size = newIfStmt .asInstanceOf[ScIfStmt] diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/SplitIfIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/SplitIfIntention.scala index faafbff85f1..c5b49ef8053 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/SplitIfIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/controlflow/SplitIfIntention.scala @@ -89,8 +89,8 @@ class SplitIfIntention extends PsiElementBaseIntentionAction { } val newIfStmt: ScExpression = - ScalaPsiElementFactory - .createExpressionFromText(expr.toString(), element.getManager) + ScalaPsiElementFactory.createExpressionFromText(expr.toString(), + element.getManager) val diff = newIfStmt .asInstanceOf[ScIfStmt] diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/ConvertToInfixExpressionIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/ConvertToInfixExpressionIntention.scala index 3a706797eca..a159b2bcd73 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/ConvertToInfixExpressionIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/ConvertToInfixExpressionIntention.scala @@ -123,8 +123,7 @@ class ConvertToInfixExpressionIntention extends PsiElementBaseIntentionAction { } val text = expr.toString() - ScalaPsiElementFactory - .createExpressionFromText(text, element.getManager) match { + ScalaPsiElementFactory.createExpressionFromText(text, element.getManager) match { case infixExpr: ScInfixExpr => infixExpr .asInstanceOf[ScInfixExpr] diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/IntroduceExplicitParameterIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/IntroduceExplicitParameterIntention.scala index 0a4d301cc33..0200b85bb92 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/IntroduceExplicitParameterIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/IntroduceExplicitParameterIntention.scala @@ -157,8 +157,9 @@ class IntroduceExplicitParameterIntention val diff = buf.length buf.append(expr.getText) - val newExpr = ScalaPsiElementFactory - .createExpressionFromText(buf.toString(), element.getManager) + val newExpr = ScalaPsiElementFactory.createExpressionFromText( + buf.toString(), + 
element.getManager) inWriteAction { val document = editor.getDocument diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/IntroduceImplicitParameterIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/IntroduceImplicitParameterIntention.scala index e0c08e077c7..778eb859e32 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/IntroduceImplicitParameterIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/IntroduceImplicitParameterIntention.scala @@ -107,8 +107,9 @@ object IntroduceImplicitParameterIntention { buf.replace(offset, offset + p.name.length, newParam) } - val newExpr = ScalaPsiElementFactory - .createExpressionFromText(buf.toString(), expr.getManager) + val newExpr = ScalaPsiElementFactory.createExpressionFromText( + buf.toString(), + expr.getManager) if (!isValidExpr(newExpr, expr.parameters.length)) return Right( diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/RemoveApplyIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/RemoveApplyIntention.scala index 9ec583007f3..8dc6c749266 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/RemoveApplyIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/RemoveApplyIntention.scala @@ -222,8 +222,9 @@ class RemoveApplyIntention extends PsiElementBaseIntentionAction { } buf.append(expr.args.getText) - val newExpr = ScalaPsiElementFactory - .createExpressionFromText(buf.toString(), element.getManager) + val newExpr = ScalaPsiElementFactory.createExpressionFromText( + buf.toString(), + element.getManager) inWriteAction { expr.replace(newExpr) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/RemoveUnnecessaryParenthesesIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/RemoveUnnecessaryParenthesesIntention.scala index 3039fe30b46..1c4b082da7e 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/RemoveUnnecessaryParenthesesIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/expression/RemoveUnnecessaryParenthesesIntention.scala @@ -42,10 +42,12 @@ class RemoveUnnecessaryParenthesesIntention if UnnecessaryParenthesesUtil.canBeStripped( expr, ignoreClarifying = false) => - val stripped: String = UnnecessaryParenthesesUtil - .getTextOfStripped(expr, ignoreClarifying = false) - val newExpr = ScalaPsiElementFactory - .createExpressionFromText(stripped, expr.getManager) + val stripped: String = UnnecessaryParenthesesUtil.getTextOfStripped( + expr, + ignoreClarifying = false) + val newExpr = + ScalaPsiElementFactory.createExpressionFromText(stripped, + expr.getManager) inWriteAction { expr.replaceExpression(newExpr, removeParenthesis = true) } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/AbstractFormatConversionIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/AbstractFormatConversionIntention.scala index f72faeefc02..b4fd44c5c00 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/AbstractFormatConversionIntention.scala +++ 
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/AbstractFormatConversionIntention.scala @@ -55,8 +55,8 @@ abstract class AbstractFormatConversionIntention(name: String, target.replace(result) match { case lit: ScLiteral if lit.isMultiLineString => - MultilineStringUtil - .addMarginsAndFormatMLString(lit, editor.getDocument) + MultilineStringUtil.addMarginsAndFormatMLString(lit, + editor.getDocument) case _ => } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/ConvertFormattedStringToInterpolatedString.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/ConvertFormattedStringToInterpolatedString.scala index 75b9cd822bc..746302d001b 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/ConvertFormattedStringToInterpolatedString.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/ConvertFormattedStringToInterpolatedString.scala @@ -22,8 +22,9 @@ class ConvertFormattedStringToInterpolatedString override def isAvailable(project: Project, editor: Editor, element: PsiElement): Boolean = { - if (!super - .isAvailable(project: Project, editor: Editor, element: PsiElement)) + if (!super.isAvailable(project: Project, + editor: Editor, + element: PsiElement)) return false element.scalaLanguageLevel.getOrElse(ScalaLanguageLevel.Default) >= Scala_2_10 } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/ConvertStringConcatenationToInterpolatedString.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/ConvertStringConcatenationToInterpolatedString.scala index 38e99820254..6f006078f04 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/ConvertStringConcatenationToInterpolatedString.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/format/ConvertStringConcatenationToInterpolatedString.scala @@ -23,8 +23,9 @@ class ConvertStringConcatenationToInterpolatedString override def isAvailable(project: Project, editor: Editor, element: PsiElement): Boolean = { - if (!super - .isAvailable(project: Project, editor: Editor, element: PsiElement)) + if (!super.isAvailable(project: Project, + editor: Editor, + element: PsiElement)) return false element.scalaLanguageLevel.getOrElse(ScalaLanguageLevel.Default) >= Scala_2_10 } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/imports/ImportMembersUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/imports/ImportMembersUtil.scala index dbf7f31cfdf..69dce26fa27 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/imports/ImportMembersUtil.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/imports/ImportMembersUtil.scala @@ -132,8 +132,9 @@ object ImportMembersUtil { case m: PsiMember => Option(m.getContainingClass) case _ => None } - val refExpr = ScalaPsiElementFactory - .createExpressionFromText(name, oldRef.getManager) + val refExpr = ScalaPsiElementFactory.createExpressionFromText( + name, + oldRef.getManager) val replaced = expr.replaceExpression(refExpr, removeParenthesis = true) replaced @@ -168,8 +169,9 @@ object ImportMembersUtil { } } val lessThan: (PsiReference, PsiReference) => Boolean = { (ref1, ref2) => - PsiTreeUtil - .isAncestor(actuallyReplaced(ref2), 
actuallyReplaced(ref1), true) + PsiTreeUtil.isAncestor(actuallyReplaced(ref2), + actuallyReplaced(ref1), + true) } usages.toSeq.sortWith(lessThan) } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/literal/InsertGapIntoStringIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/literal/InsertGapIntoStringIntention.scala index 3b9f2eab9b2..b72a93b9f83 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/literal/InsertGapIntoStringIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/literal/InsertGapIntoStringIntention.scala @@ -28,8 +28,11 @@ class InsertGapIntoStringIntention extends PsiElementBaseIntentionAction { def insertString(str: String, caretMove: Int) { extensions.inWriteAction { editor.getDocument.insertString(editor.getCaretModel.getOffset, str) - editor.getCaretModel - .moveCaretRelatively(caretMove, 0, false, false, false) + editor.getCaretModel.moveCaretRelatively(caretMove, + 0, + false, + false, + false) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/literal/StringToMultilineStringIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/literal/StringToMultilineStringIntention.scala index 1d452c088a3..45b2b2980a3 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/literal/StringToMultilineStringIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/literal/StringToMultilineStringIntention.scala @@ -65,8 +65,9 @@ class StringToMultilineStringIntention extends PsiElementBaseIntentionAction { InterpolatedStringFormatter.formatContent(parts, toMultiline = true) val quote = "\"\"\"" val text = s"$prefix$quote$content$quote" - val newLiteral = ScalaPsiElementFactory - .createExpressionFromText(text, literal.getManager) + val newLiteral = ScalaPsiElementFactory.createExpressionFromText( + text, + literal.getManager) val replaced = interpolated.replace(newLiteral) addMarginsAndFormatMLString(replaced, document) case _ => @@ -97,8 +98,9 @@ class StringToMultilineStringIntention extends PsiElementBaseIntentionAction { InterpolatedStringFormatter.formatContent(parts, toMultiline = false) val quote = "\"" val text = s"$prefix$quote$content$quote" - val newLiteral = ScalaPsiElementFactory - .createExpressionFromText(text, literal.getManager) + val newLiteral = ScalaPsiElementFactory.createExpressionFromText( + text, + literal.getManager) toReplace.replace(newLiteral) case _ => var toReplace: PsiElement = literal @@ -116,8 +118,9 @@ class StringToMultilineStringIntention extends PsiElementBaseIntentionAction { case Seq(Text(s)) => val newLiteralText = "\"" + StringUtil.escapeStringCharacters(s) + "\"" - val newLiteral = ScalaPsiElementFactory - .createExpressionFromText(newLiteralText, literal.getManager) + val newLiteral = ScalaPsiElementFactory.createExpressionFromText( + newLiteralText, + literal.getManager) toReplace.replace(newLiteral) case _ => } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/ConvertToTypedPatternIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/ConvertToTypedPatternIntention.scala index 3709069c84d..3c0b2c173b1 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/ConvertToTypedPatternIntention.scala +++ 
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/ConvertToTypedPatternIntention.scala @@ -59,8 +59,9 @@ class ConvertToTypedPatternIntention extends PsiElementBaseIntentionAction { case _ => "value" } // TODO replace references to the constructor pattern params with "value.param" - val newPattern = ScalaPsiElementFactory - .createPatternFromText("%s: %s".format(name, codeRef.getText), manager) + val newPattern = ScalaPsiElementFactory.createPatternFromText( + "%s: %s".format(name, codeRef.getText), + manager) constrPattern.replace(newPattern) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/CreateCaseClausesIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/CreateCaseClausesIntention.scala index 299fb19464b..5e23f81cc6d 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/CreateCaseClausesIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/CreateCaseClausesIntention.scala @@ -70,8 +70,9 @@ final class CreateCaseClausesIntention extends PsiElementBaseIntentionAction { element: PsiElement) { val inheritors = inheritorsOf(cls) val (caseClauseTexts, bindTos) = inheritors.map(caseClauseText).unzip - val newMatchStmt = ScalaPsiElementFactory - .createMatch(expr.getText, caseClauseTexts, element.getManager) + val newMatchStmt = ScalaPsiElementFactory.createMatch(expr.getText, + caseClauseTexts, + element.getManager) val replaced = matchStmt.replace(newMatchStmt).asInstanceOf[ScMatchStmt] bindReferences(replaced, bindTos) } @@ -85,8 +86,9 @@ final class CreateCaseClausesIntention extends PsiElementBaseIntentionAction { } val caseClauseTexts = enumConsts.map(ec => "case %s.%s =>".format(cls.name, ec.name)) - val newMatchStmt = ScalaPsiElementFactory - .createMatch(expr.getText, caseClauseTexts, element.getManager) + val newMatchStmt = ScalaPsiElementFactory.createMatch(expr.getText, + caseClauseTexts, + element.getManager) val replaced = matchStmt.replace(newMatchStmt).asInstanceOf[ScMatchStmt] bindReferences(replaced, _ => cls) } @@ -104,8 +106,9 @@ final class CreateCaseClausesIntention extends PsiElementBaseIntentionAction { else Seq( s"\n$defaultCaseClauseText //could not find inherited objects or case classes\n") - val newMatchStmt = ScalaPsiElementFactory - .createMatch(expr.getText, caseClauseTexts, element.getManager) + val newMatchStmt = ScalaPsiElementFactory.createMatch(expr.getText, + caseClauseTexts, + element.getManager) val replaced = matchStmt.replace(newMatchStmt).asInstanceOf[ScMatchStmt] bindReferences(replaced, bindTos) } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/ExpandPatternIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/ExpandPatternIntention.scala index 4efe35ecb44..2770edbbab1 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/ExpandPatternIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/matcher/ExpandPatternIntention.scala @@ -49,8 +49,9 @@ class ExpandPatternIntention extends PsiElementBaseIntentionAction { IdeDocumentHistory .getInstance(project) .includeCurrentPlaceAsChangePlace() - val newPattern = ScalaPsiElementFactory - .createPatternFromText(newPatternText, element.getManager) + val newPattern = ScalaPsiElementFactory.createPatternFromText( 
+ newPatternText, + element.getManager) val replaced = origPattern.replace(newPattern) ScalaPsiUtil.adjustTypes(replaced) case None => diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertFromInfixIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertFromInfixIntention.scala index b394a740cc0..0c44c3e5619 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertFromInfixIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertFromInfixIntention.scala @@ -45,8 +45,9 @@ class ConvertFromInfixIntention extends PsiElementBaseIntentionAction { val newTypeText = infixTypeElement.ref.getText + "[" + infixTypeElement.lOp.getText + ", " + infixTypeElement.rOp.map(_.getText).getOrElse("") + "]" - val newTypeElement = ScalaPsiElementFactory - .createTypeElementFromText(newTypeText, element.getManager) + val newTypeElement = ScalaPsiElementFactory.createTypeElementFromText( + newTypeText, + element.getManager) val replaced = elementToReplace.replace(newTypeElement) UndoUtil.markPsiFileForUndo(replaced.getContainingFile) } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertImplicitBoundsToImplicitParameter.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertImplicitBoundsToImplicitParameter.scala index 4e06779f9de..331ae9d71db 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertImplicitBoundsToImplicitParameter.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertImplicitBoundsToImplicitParameter.scala @@ -56,8 +56,10 @@ object ConvertImplicitBoundsToImplicitParameter { def canBeConverted(element: PsiElement): Boolean = { val paramTypeElement: ScTypeBoundsOwner = PsiTreeUtil.getParentOfType(element, classOf[ScTypeBoundsOwner], false) - val scTypeParamOwner: ScTypeParametersOwner = PsiTreeUtil - .getParentOfType(paramTypeElement, classOf[ScTypeParametersOwner], true) + val scTypeParamOwner: ScTypeParametersOwner = PsiTreeUtil.getParentOfType( + paramTypeElement, + classOf[ScTypeParametersOwner], + true) paramTypeElement != null && paramTypeElement.hasImplicitBound && !scTypeParamOwner.isInstanceOf[ScTrait] } @@ -66,8 +68,10 @@ object ConvertImplicitBoundsToImplicitParameter { if (element == null || !element.isValid) return Seq.empty val (function: ScMethodLike, paramOwner: ScParameterOwner, - typeParamOwner: ScTypeParametersOwner) = PsiTreeUtil - .getParentOfType(element, classOf[ScParameterOwner], false) match { + typeParamOwner: ScTypeParametersOwner) = PsiTreeUtil.getParentOfType( + element, + classOf[ScParameterOwner], + false) match { case x: ScFunction => (x, x, x) case x: ScClass => (x.constructor.getOrElse(return Seq.empty), x, x) case _ => return Seq.empty @@ -84,12 +88,14 @@ object ConvertImplicitBoundsToImplicitParameter { paramClause.delete() function.effectiveParameterClauses.lastOption match { case Some(implicitParamClause) if implicitParamClause.isImplicit => - val newClause = ScalaPsiElementFactory - .createClauseFromText(implicitParamClause.getText, manager) + val newClause = ScalaPsiElementFactory.createClauseFromText( + implicitParamClause.getText, + manager) val addedParametersCount = newClause.parameters.size for (p <- paramClause.parameters) { - val newParam = ScalaPsiElementFactory - 
.createParameterFromText(p.getText, manager) + val newParam = ScalaPsiElementFactory.createParameterFromText( + p.getText, + manager) newClause.addParameter(newParam) } val addedClause = diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertToInfixIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertToInfixIntention.scala index eef153ec681..c034beafa62 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertToInfixIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertToInfixIntention.scala @@ -39,8 +39,9 @@ class ConvertToInfixIntention extends PsiElementBaseIntentionAction { override def invoke(project: Project, editor: Editor, element: PsiElement) { if (element == null || !element.isValid) return val paramTypeElement: ScParameterizedTypeElement = - PsiTreeUtil - .getParentOfType(element, classOf[ScParameterizedTypeElement], false) + PsiTreeUtil.getParentOfType(element, + classOf[ScParameterizedTypeElement], + false) val Seq(targ1, targ2) = paramTypeElement.typeArgList.typeArgs val needParens = paramTypeElement.getParent match { case _: ScTypeArgs | _: ScParenthesisedTypeElement => false @@ -50,16 +51,18 @@ class ConvertToInfixIntention extends PsiElementBaseIntentionAction { .map(_.getText) .mkString(" ") .parenthesisedIf(needParens) - val newTypeElement = ScalaPsiElementFactory - .createTypeElementFromText(newTypeText, element.getManager) + val newTypeElement = ScalaPsiElementFactory.createTypeElementFromText( + newTypeText, + element.getManager) if (paramTypeElement.isValid) { val replaced = try { paramTypeElement.replace(newTypeElement) } catch { case npe: NullPointerException => - throw new RuntimeException("Unable to replace: %s with %s" - .format(paramTypeElement, newTypeText), - npe) + throw new RuntimeException( + "Unable to replace: %s with %s".format(paramTypeElement, + newTypeText), + npe) } UndoUtil.markPsiFileForUndo(replaced.getContainingFile) } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/MakeTypeMoreSpecificIntention.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/MakeTypeMoreSpecificIntention.scala index 124a6e45af7..e6cc3930c5b 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/MakeTypeMoreSpecificIntention.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/MakeTypeMoreSpecificIntention.scala @@ -35,8 +35,9 @@ class MakeTypeMoreSpecificIntention extends PsiElementBaseIntentionAction { override def invoke(project: Project, editor: Editor, element: PsiElement): Unit = { - ToggleTypeAnnotation - .complete(new MakeTypeMoreSpecificStrategy(Option(editor)), element) + ToggleTypeAnnotation.complete( + new MakeTypeMoreSpecificStrategy(Option(editor)), + element) } override def isAvailable(project: Project, diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ToggleTypeAnnotation.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ToggleTypeAnnotation.scala index d5f27659e8a..5b6641cfa78 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ToggleTypeAnnotation.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/ToggleTypeAnnotation.scala @@ -33,8 +33,8 
@@ class ToggleTypeAnnotation extends PsiElementBaseIntentionAction { } override def invoke(project: Project, editor: Editor, element: PsiElement) { - ToggleTypeAnnotation - .complete(new AddOrRemoveStrategy(Option(editor)), element) + ToggleTypeAnnotation.complete(new AddOrRemoveStrategy(Option(editor)), + element) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/UpdateStrategy.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/UpdateStrategy.scala index b87293f06f0..8722fe387d0 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/UpdateStrategy.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/intention/types/UpdateStrategy.scala @@ -98,8 +98,9 @@ abstract class UpdateStrategy(editor: Option[Editor]) extends Strategy { } def removeFromPattern(pattern: ScTypedPattern) { - val newPattern = ScalaPsiElementFactory - .createPatternFromText(pattern.name, pattern.getManager) + val newPattern = ScalaPsiElementFactory.createPatternFromText( + pattern.name, + pattern.getManager) pattern.replace(newPattern) } @@ -130,10 +131,12 @@ abstract class UpdateStrategy(editor: Option[Editor]) extends Strategy { } def removeFromParameter(param: ScParameter) { - val newParam = ScalaPsiElementFactory - .createParameterFromText(param.name, param.getManager) - val newClause = ScalaPsiElementFactory - .createClauseForFunctionExprFromText(newParam.getText, param.getManager) + val newParam = ScalaPsiElementFactory.createParameterFromText( + param.name, + param.getManager) + val newClause = ScalaPsiElementFactory.createClauseForFunctionExprFromText( + newParam.getText, + param.getManager) val expr: ScFunctionExpr = PsiTreeUtil.getParentOfType(param, classOf[ScFunctionExpr], false) if (expr != null) { diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/template/util/MacroUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/template/util/MacroUtil.scala index 8f0c2eb1bdd..5db2e8d7a4d 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/template/util/MacroUtil.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInsight/template/util/MacroUtil.scala @@ -34,8 +34,10 @@ object MacroUtil { def getVariablesForScope(element: PsiElement): Array[ScalaResolveResult] = { val completionProcessor = new VariablesCompletionProcessor( StdKinds.valuesRef) - PsiTreeUtil - .treeWalkUp(completionProcessor, element, null, ResolveState.initial) + PsiTreeUtil.treeWalkUp(completionProcessor, + element, + null, + ResolveState.initial) completionProcessor.candidates } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/ReplaceQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/ReplaceQuickFix.scala index 3ff1ef43e66..45432bcb096 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/ReplaceQuickFix.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/ReplaceQuickFix.scala @@ -17,8 +17,9 @@ class ReplaceQuickFix(family: String, name: String, substitution: String) override def applyFix(project: Project, descriptor: ProblemDescriptor): Unit = { val element = descriptor.getPsiElement - val newElement = ScalaPsiElementFactory - .parseElement(substitution, PsiManager.getInstance(project)) + val newElement = ScalaPsiElementFactory.parseElement( + substitution, + PsiManager.getInstance(project)) 
element.replace(newElement) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/booleans/SimplifyBooleanInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/booleans/SimplifyBooleanInspection.scala index 6dc8b713b68..63e96758d3d 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/booleans/SimplifyBooleanInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/booleans/SimplifyBooleanInspection.scala @@ -119,8 +119,8 @@ object SimplifyBooleanUtil { else { booleanConst(operand) match { case Some(bool: Boolean) => - ScalaPsiElementFactory - .createExpressionFromText((!bool).toString, expr.getManager) + ScalaPsiElementFactory.createExpressionFromText((!bool).toString, + expr.getManager) case None => expr } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/cast/ScalaRedundantCastInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/cast/ScalaRedundantCastInspection.scala index c733bf98d9c..5e8d0b562bc 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/cast/ScalaRedundantCastInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/cast/ScalaRedundantCastInspection.scala @@ -35,8 +35,9 @@ class ScalaRedundantCastInspection val descriptor = { val range = new TextRange(left.getTextLength, call.getTextLength) - val message = "Casting '%s' to '%s' is redundant" - .format(left.getText, castType.presentableText) + val message = "Casting '%s' to '%s' is redundant".format( + left.getText, + castType.presentableText) new ProblemDescriptorImpl( call, diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/cast/ScalaRedundantConversionInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/cast/ScalaRedundantConversionInspection.scala index 325fb15a58d..afb41a7d60a 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/cast/ScalaRedundantConversionInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/cast/ScalaRedundantConversionInspection.scala @@ -72,8 +72,8 @@ class ScalaRedundantConversionInspection val descriptor = { val range = new TextRange(offset, element.getTextLength) - val message = "Casting '%s' to '%s' is redundant" - .format(left.getText, conversionType) + val message = "Casting '%s' to '%s' is redundant".format(left.getText, + conversionType) new ProblemDescriptorImpl( element, diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/collections/MapGetOrElseInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/collections/MapGetOrElseInspection.scala index 410c841346d..1084dc72df1 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/collections/MapGetOrElseInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/collections/MapGetOrElseInspection.scala @@ -53,8 +53,8 @@ object MapGetOrElse extends SimplificationType() { case ExpressionType(ScFunctionType(retType, _)) => retType case _ => return false } - ScalaPsiElementFactory - .createExpressionFromText(replacementText, qual.getContext) match { + ScalaPsiElementFactory.createExpressionFromText(replacementText, + qual.getContext) match { case ScMethodCall(ScMethodCall(_, Seq(firstArg)), _) => mapArgRetType.conforms(firstArg.getType().getOrNothing) case _ 
=> false @@ -80,8 +80,9 @@ object MapGetOrElse extends SimplificationType() { val secondArgText = stripped(mapArg).getText val newExprText = s"${baseExpr.getText}.fold {$firstArgText}{$secondArgText}" - ScalaPsiElementFactory - .createExpressionFromText(newExprText, baseExpr.getContext) match { + ScalaPsiElementFactory.createExpressionFromText( + newExprText, + baseExpr.getContext) match { case ScMethodCall(ScMethodCall(_, Seq(firstArg)), _) => mapArgRetType.conforms(firstArg.getType().getOrNothing) case _ => false diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/collections/OperationOnCollectionQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/collections/OperationOnCollectionQuickFix.scala index 2634f198b4d..f70be9cf41d 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/collections/OperationOnCollectionQuickFix.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/collections/OperationOnCollectionQuickFix.scala @@ -15,8 +15,9 @@ class OperationOnCollectionQuickFix(expr: ScExpression, simpl: Simplification) def doApplyFix(project: Project) { val toReplace = simpl.exprToReplace.getElement if (!toReplace.isValid) return - val newExpr = ScalaPsiElementFactory - .createExpressionFromText(simpl.replacementText, toReplace.getManager) + val newExpr = ScalaPsiElementFactory.createExpressionFromText( + simpl.replacementText, + toReplace.getManager) toReplace.replaceExpression(newExpr, removeParenthesis = true) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/controlFlow/ScalaUselessExpressionInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/controlFlow/ScalaUselessExpressionInspection.scala index 2c67b62e81b..40b3522c665 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/controlFlow/ScalaUselessExpressionInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/controlFlow/ScalaUselessExpressionInspection.scala @@ -44,8 +44,9 @@ class ScalaUselessExpressionInspection val message = "Useless expression" val removeElemFix = new RemoveElementQuickFix("Remove expression", expr) - val addReturnKeywordFix = PsiTreeUtil - .getParentOfType(expr, classOf[ScFunctionDefinition]) match { + val addReturnKeywordFix = PsiTreeUtil.getParentOfType( + expr, + classOf[ScFunctionDefinition]) match { case null => Seq.empty case fun if fun.returnType.getOrAny != types.Unit => Seq(new AddReturnQuickFix(expr)) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/etaExpansion/ConvertibleToMethodValueInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/etaExpansion/ConvertibleToMethodValueInspection.scala index 7f1a4c501b0..f9c6510aec4 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/etaExpansion/ConvertibleToMethodValueInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/etaExpansion/ConvertibleToMethodValueInspection.scala @@ -163,8 +163,9 @@ class ConvertibleToMethodValueQuickFix(expr: ScExpression, def doApplyFix(project: Project) { val scExpr = getElement if (!scExpr.isValid) return - val newExpr = ScalaPsiElementFactory - .createExpressionFromText(replacement, scExpr.getManager) + val newExpr = ScalaPsiElementFactory.createExpressionFromText( + replacement, + scExpr.getManager) scExpr.replaceExpression(newExpr, removeParenthesis = 
true) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/functionExpressions/MatchToPartialFunctionInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/functionExpressions/MatchToPartialFunctionInspection.scala index 82d26823715..543519d50d5 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/functionExpressions/MatchToPartialFunctionInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/functionExpressions/MatchToPartialFunctionInspection.scala @@ -130,10 +130,11 @@ class MatchToPartialFunctionQuickFix(matchStmt: ScMatchStmt, if (leftBrace == null) return addNamingPatterns(matchStmtCopy, needNamingPattern(mStmt)) - matchStmtCopy - .deleteChildRange(matchStmtCopy.getFirstChild, leftBrace.getPrevSibling) - val newBlock = ScalaPsiElementFactory - .createExpressionFromText(matchStmtCopy.getText, mStmt.getManager) + matchStmtCopy.deleteChildRange(matchStmtCopy.getFirstChild, + leftBrace.getPrevSibling) + val newBlock = ScalaPsiElementFactory.createExpressionFromText( + matchStmtCopy.getText, + mStmt.getManager) CodeEditUtil.setOldIndentation( newBlock.getNode.asInstanceOf[TreeElement], CodeEditUtil.getOldIndentation(matchStmtCopy.getNode)) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/imports/RemoveBracesForSingleImportQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/imports/RemoveBracesForSingleImportQuickFix.scala index 5bba57e191d..6ccb4276f8b 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/imports/RemoveBracesForSingleImportQuickFix.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/imports/RemoveBracesForSingleImportQuickFix.scala @@ -28,8 +28,9 @@ class RemoveBracesForSingleImportQuickFix(importExpr: ScImportExpr) buf.append(iExpr.getNames(0)) } - val newImportExpr = ScalaPsiElementFactory - .createImportExprFromText(buf.toString(), iExpr.getManager) + val newImportExpr = ScalaPsiElementFactory.createImportExprFromText( + buf.toString(), + iExpr.getManager) inWriteAction { iExpr.replace(newImportExpr) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/literal/FloatLiteralEndingWithDecimalPointInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/literal/FloatLiteralEndingWithDecimalPointInspection.scala index d49361feccd..6a4f0fc35aa 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/literal/FloatLiteralEndingWithDecimalPointInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/literal/FloatLiteralEndingWithDecimalPointInspection.scala @@ -27,8 +27,9 @@ class MakeDoubleFix(lit: ScLiteral) lit) { def doApplyFix(project: Project) { val l = getElement - val exp = ScalaPsiElementFactory - .createExpressionFromText(l.getText.dropRight(1) + "d", l.getManager) + val exp = ScalaPsiElementFactory.createExpressionFromText( + l.getText.dropRight(1) + "d", + l.getManager) l.replace(exp) } } @@ -39,8 +40,9 @@ class MakeFloatFix(lit: ScLiteral) lit) { def doApplyFix(project: Project) { val l = getElement - val exp = ScalaPsiElementFactory - .createExpressionFromText(l.getText.dropRight(1) + "f", l.getManager) + val exp = ScalaPsiElementFactory.createExpressionFromText( + l.getText.dropRight(1) + "f", + l.getManager) l.replace(exp) } } @@ -50,8 +52,8 @@ class AddZeroAfterDecimalPoint(lit: ScLiteral) lit) { def 
doApplyFix(project: Project) { val l = getElement - val exp = ScalaPsiElementFactory - .createExpressionFromText(l.getText + "0", l.getManager) + val exp = ScalaPsiElementFactory.createExpressionFromText(l.getText + "0", + l.getManager) l.replace(exp) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/AccessorLikeMethodIsEmptyParenInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/AccessorLikeMethodIsEmptyParenInspection.scala index 74335f236f1..6e3052cf691 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/AccessorLikeMethodIsEmptyParenInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/AccessorLikeMethodIsEmptyParenInspection.scala @@ -17,7 +17,8 @@ class AccessorLikeMethodIsEmptyParenInspection case f: ScFunction if f.hasQueryLikeName && f.isEmptyParen && !f.hasUnitResultType && f.superMethods.isEmpty && !isScalaJSFacade(f.getContainingClass) => - holder - .registerProblem(f.nameId, getDisplayName, new RemoveParentheses(f)) + holder.registerProblem(f.nameId, + getDisplayName, + new RemoveParentheses(f)) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/MutatorLikeMethodIsParameterlessInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/MutatorLikeMethodIsParameterlessInspection.scala index 73cbfae5f31..a94dd8a4834 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/MutatorLikeMethodIsParameterlessInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/MutatorLikeMethodIsParameterlessInspection.scala @@ -21,8 +21,9 @@ class MutatorLikeMethodIsParameterlessInspection case f: ScFunction if f.hasMutatorLikeName && f.isParameterless && !f.hasUnitResultType && f.superMethods.isEmpty && !isUndescoreFunction(f) => - holder - .registerProblem(f.nameId, getDisplayName, new AddEmptyParentheses(f)) + holder.registerProblem(f.nameId, + getDisplayName, + new AddEmptyParentheses(f)) } private def isUndescoreFunction(f: ScFunction): Boolean = f match { diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/UnitMethodDefinedWithEqualsSignInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/UnitMethodDefinedWithEqualsSignInspection.scala index 22f7b4d168c..1301e95167b 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/UnitMethodDefinedWithEqualsSignInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/UnitMethodDefinedWithEqualsSignInspection.scala @@ -18,8 +18,9 @@ class UnitMethodDefinedWithEqualsSignInspection if !f.hasExplicitType && f.hasUnitResultType && !f.isSecondaryConstructor => f.assignment.foreach { assignment => - holder - .registerProblem(assignment, getDisplayName, new RemoveEqualsSign(f)) + holder.registerProblem(assignment, + getDisplayName, + new RemoveEqualsSign(f)) } } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/UnitMethodIsParameterlessInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/UnitMethodIsParameterlessInspection.scala index 1ba81fb8f45..b610ecc0f3c 100644 --- 
a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/UnitMethodIsParameterlessInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/UnitMethodIsParameterlessInspection.scala @@ -16,7 +16,8 @@ class UnitMethodIsParameterlessInspection case f: ScFunction if f.isParameterless && f.hasUnitResultType && f.superMethods.isEmpty => - holder - .registerProblem(f.nameId, getDisplayName, new AddEmptyParentheses(f)) + holder.registerProblem(f.nameId, + getDisplayName, + new AddEmptyParentheses(f)) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/quickfix/InsertReturnTypeAndEquals.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/quickfix/InsertReturnTypeAndEquals.scala index e77f9473cfa..bea933bad7c 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/quickfix/InsertReturnTypeAndEquals.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/methodSignature/quickfix/InsertReturnTypeAndEquals.scala @@ -24,8 +24,11 @@ class InsertReturnTypeAndEquals(functionDef: ScFunctionDefinition) funDef.removeAssignment() funDef.removeExplicitType() val manager = funDef.getManager - val fakeDecl = ScalaPsiElementFactory - .createDeclaration("x", "Unit", isVariable = false, null, manager) + val fakeDecl = ScalaPsiElementFactory.createDeclaration("x", + "Unit", + isVariable = false, + null, + manager) val colon = fakeDecl.findFirstChildByType(ScalaTokenTypes.tCOLON) val assign = fakeDecl.findFirstChildByType(ScalaTokenTypes.tASSIGN) val body = funDef.body.get diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/packageNameInspection/ScalaMoveToPackageQuickFix.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/packageNameInspection/ScalaMoveToPackageQuickFix.scala index 5ffc3ac328f..fe2ffa019ba 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/packageNameInspection/ScalaMoveToPackageQuickFix.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/packageNameInspection/ScalaMoveToPackageQuickFix.scala @@ -36,8 +36,10 @@ class ScalaMoveToPackageQuickFix(myFile: ScalaFile, packQualName: String) val fileIndex: ProjectFileIndex = ProjectRootManager.getInstance(project).getFileIndex val currentModule: Module = fileIndex.getModuleForFile(file.getVirtualFile) - val directory = PackageUtil - .findOrCreateDirectoryForPackage(currentModule, packageName, null, true) + val directory = PackageUtil.findOrCreateDirectoryForPackage(currentModule, + packageName, + null, + true) if (directory == null) { return diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/parentheses/ScalaUnnecessaryParenthesesInspectionBase.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/parentheses/ScalaUnnecessaryParenthesesInspectionBase.scala index 7ebc7d96941..ca67c01a895 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/parentheses/ScalaUnnecessaryParenthesesInspectionBase.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/parentheses/ScalaUnnecessaryParenthesesInspectionBase.scala @@ -66,8 +66,9 @@ class UnnecessaryParenthesesQuickFix(parenthesized: ScParenthesisedExpr, val parenthExpr = getElement if (!parenthExpr.isValid) return - val newExpr = ScalaPsiElementFactory - 
.createExpressionFromText(textOfStripped, parenthExpr.getManager) + val newExpr = ScalaPsiElementFactory.createExpressionFromText( + textOfStripped, + parenthExpr.getManager) val replaced = parenthExpr.replaceExpression(newExpr, removeParenthesis = true) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/semicolon/ScalaUnnecessarySemicolonInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/semicolon/ScalaUnnecessarySemicolonInspection.scala index 10d2ebc927b..62f9ed32358 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/semicolon/ScalaUnnecessarySemicolonInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/semicolon/ScalaUnnecessarySemicolonInspection.scala @@ -40,8 +40,9 @@ class ScalaUnnecessarySemicolonInspection extends LocalInspectionTool { val text = file.getText val textWithoutSemicolon = text.take(offset) + text.drop(offset + 1) - val newFile = ScalaPsiElementFactory - .createScalaFile(textWithoutSemicolon, element.getManager) + val newFile = ScalaPsiElementFactory.createScalaFile( + textWithoutSemicolon, + element.getManager) var elem1 = file.findElementAt(offset - 1) var elem2 = newFile.findElementAt(offset - 1) while (elem1 != null && endOffset(elem1) <= offset && diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/shadow/VariablePatternShadowInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/shadow/VariablePatternShadowInspection.scala index 8fef2e2e530..f501c2808d1 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/shadow/VariablePatternShadowInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/shadow/VariablePatternShadowInspection.scala @@ -63,8 +63,9 @@ class ConvertToStableIdentifierPatternFix(r: ScReferencePattern) r) { def doApplyFix(project: Project) { val ref = getElement - val stableIdPattern = ScalaPsiElementFactory - .createPatternFromText("`%s`".format(ref.getText), ref.getManager) + val stableIdPattern = ScalaPsiElementFactory.createPatternFromText( + "`%s`".format(ref.getText), + ref.getManager) ref.replace(stableIdPattern) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/suppression/ScalaSuppressableInspectionTool.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/suppression/ScalaSuppressableInspectionTool.scala index 15b171a7839..dae0e362515 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/suppression/ScalaSuppressableInspectionTool.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/suppression/ScalaSuppressableInspectionTool.scala @@ -22,8 +22,9 @@ object ScalaSuppressableInspectionTool { val text: String = comment.getText val matcher: Matcher = SuppressionUtil.SUPPRESS_IN_LINE_COMMENT_PATTERN.matcher(text) - if (matcher.matches && SuppressionUtil - .isInspectionToolIdMentioned(matcher.group(1), toolId)) { + if (matcher.matches && SuppressionUtil.isInspectionToolIdMentioned( + matcher.group(1), + toolId)) { return Some(comment) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeChecking/ComparingUnrelatedTypesInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeChecking/ComparingUnrelatedTypesInspection.scala index 873cdbc0fdf..61acf2029c5 100644 --- 
a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeChecking/ComparingUnrelatedTypesInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeChecking/ComparingUnrelatedTypesInspection.scala @@ -115,8 +115,10 @@ class ComparingUnrelatedTypesInspection } { val (elemTypeText, argTypeText) = ScTypePresentation.different(elemType, argType) - val message = InspectionBundle - .message("comparing.unrelated.types.hint", elemTypeText, argTypeText) + val message = InspectionBundle.message( + "comparing.unrelated.types.hint", + elemTypeText, + argTypeText) holder.registerProblem(arg, message, ProblemHighlightType.GENERIC_ERROR_OR_WARNING) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeChecking/TypeCheckCanBeMatchInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeChecking/TypeCheckCanBeMatchInspection.scala index 994c5cd5ed0..571f7c0cf33 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeChecking/TypeCheckCanBeMatchInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeChecking/TypeCheckCanBeMatchInspection.scala @@ -166,8 +166,7 @@ object TypeCheckToMatchUtil { } def typeNeedParentheses(typeElem: ScTypeElement): Boolean = { - PsiTreeUtil - .getChildOfType(typeElem, classOf[ScExistentialClause]) != null + PsiTreeUtil.getChildOfType(typeElem, classOf[ScExistentialClause]) != null } for { @@ -204,8 +203,9 @@ object TypeCheckToMatchUtil { ifStmt.getParent)) val name = suggestedNames(0) asInstOfEverywhere.foreach { c => - val newExpr = ScalaPsiElementFactory - .createExpressionFromText(name, ifStmt.getManager) + val newExpr = ScalaPsiElementFactory.createExpressionFromText( + name, + ifStmt.getManager) inWriteAction { c.replaceExpression(newExpr, removeParenthesis = true) } @@ -226,8 +226,9 @@ object TypeCheckToMatchUtil { patternDef.delete() } val name = definedName.get - val newExpr = ScalaPsiElementFactory - .createExpressionFromText(name, ifStmt.getManager) + val newExpr = ScalaPsiElementFactory.createExpressionFromText( + name, + ifStmt.getManager) inWriteAction { asInstOfEverywhere.foreach( _.replaceExpression(newExpr, removeParenthesis = true)) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeLambdaSimplify/KindProjectorSimplifyTypeProjectionInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeLambdaSimplify/KindProjectorSimplifyTypeProjectionInspection.scala index b9d2a7d1769..dbda3dde751 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeLambdaSimplify/KindProjectorSimplifyTypeProjectionInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeLambdaSimplify/KindProjectorSimplifyTypeProjectionInspection.scala @@ -214,8 +214,8 @@ class KindProjectorSimplifyTypeProjectionQuickFix(e: PsiElement, val elem = getElement if (!elem.isValid) return - val te = ScalaPsiElementFactory - .createTypeElementFromText(replacement, elem.getManager) + val te = ScalaPsiElementFactory.createTypeElementFromText(replacement, + elem.getManager) elem.replace(te) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeLambdaSimplify/KindProjectorUseCorrectLambdaKeywordInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeLambdaSimplify/KindProjectorUseCorrectLambdaKeywordInspection.scala index 
6a9d1a54372..83937d4f2eb 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeLambdaSimplify/KindProjectorUseCorrectLambdaKeywordInspection.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/typeLambdaSimplify/KindProjectorUseCorrectLambdaKeywordInspection.scala @@ -74,8 +74,9 @@ class KindProjectorUseCorrectLambdaKeywordQuickFix(e: PsiElement, val elem = getElement if (!elem.isValid) return - val repl = ScalaPsiElementFactory - .createTypeElementFromText(replacement, elem.getManager) + val repl = ScalaPsiElementFactory.createTypeElementFromText( + replacement, + elem.getManager) elem.replace(repl) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedImportPass.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedImportPass.scala index 32fec82f5d7..0e40066fcd7 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedImportPass.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedImportPass.scala @@ -92,8 +92,9 @@ class ScalaUnusedImportPass(val file: PsiFile, ScalaApplicationSettings.getInstance().OPTIMIZE_IMPORTS_ON_THE_FLY && ScalaUnusedImportPass.timeToOptimizeImports(file) && file.isWritable) { - ScalaUnusedImportPass - .invokeOnTheFlyImportOptimizer(myOptimizeImportsRunnable, file) + ScalaUnusedImportPass.invokeOnTheFlyImportOptimizer( + myOptimizeImportsRunnable, + file) } } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedImportPassBase.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedImportPassBase.scala index 06ed8cb1713..4379277ba46 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedImportPassBase.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedImportPassBase.scala @@ -76,8 +76,9 @@ trait ScalaUnusedImportPassBase { self: TextEditorHighlightingPass => .isAlwaysUsedImport(qName)) => Seq.empty case Some(psi) => - val annotation = annotationHolder - .createWarningAnnotation(psi, "Unused import statement") + val annotation = annotationHolder.createWarningAnnotation( + psi, + "Unused import statement") annotation setHighlightType ProblemHighlightType.LIKE_UNUSED_SYMBOL getFixes.foreach(annotation.registerFix) qName.foreach(name => diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedSymbolPass.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedSymbolPass.scala index ef6b3a950a5..5f2350c0523 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedSymbolPass.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/codeInspection/unusedInspections/ScalaUnusedSymbolPass.scala @@ -228,8 +228,10 @@ class ScalaUnusedSymbolPass(file: PsiFile, editor: Editor) .getStartOffset val end = declElementHolder.getTextRange.getEndOffset val range = TextRange.create(start, end) - val annotation = state.annotationHolder - .createAnnotation(severity, new TextRange(start, end), message) + val annotation = state.annotationHolder.createAnnotation( + severity, + new TextRange(start, end), + message) val key = 
HighlightDisplayKey.find(VarCouldBeValInspection.ShortName) val fix = new VarToValFix( declElementHolder.asInstanceOf[ScVariableDefinition], @@ -243,13 +245,13 @@ class ScalaUnusedSymbolPass(file: PsiFile, editor: Editor) override def getInfos: java.util.List[HighlightInfo] = highlightInfos.toList private def isUnusedSymbolSuppressed(element: PsiElement) = { - inspectionSuppressor - .isSuppressedFor(element, ScalaUnusedSymbolInspection.ShortName) + inspectionSuppressor.isSuppressedFor(element, + ScalaUnusedSymbolInspection.ShortName) } private def isVarCouldBeValSuppressed(element: PsiElement) = { - inspectionSuppressor - .isSuppressedFor(element, VarCouldBeValInspection.ShortName) + inspectionSuppressor.isSuppressedFor(element, + VarCouldBeValInspection.ShortName) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/compiler/CompileServerManager.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/compiler/CompileServerManager.scala index d865d0d4eb5..3fc0a250796 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/compiler/CompileServerManager.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/compiler/CompileServerManager.scala @@ -143,8 +143,11 @@ class CompileServerManager(project: Project) extends ProjectComponent { val group = new DefaultActionGroup(actions: _*) val context = DataManager.getInstance.getDataContext(e.getComponent) - val popup = JBPopupFactory.getInstance - .createActionGroupPopup(title, group, context, mnemonics, true) + val popup = JBPopupFactory.getInstance.createActionGroupPopup(title, + group, + context, + mnemonics, + true) val dimension = popup.getContent.getPreferredSize val at = new Point(0, -dimension.height) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/compiler/RemoteServerRunner.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/compiler/RemoteServerRunner.scala index 4f00a2676d4..9eb698e50fe 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/compiler/RemoteServerRunner.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/compiler/RemoteServerRunner.scala @@ -42,8 +42,9 @@ class RemoteServerRunner(project: Project) extends RemoteResourceOwner { send(serverAlias, arguments, client) } catch { case e: ConnectException => - val message = "Cannot connect to compile server at %s:%s" - .format(address.toString, port) + val message = "Cannot connect to compile server at %s:%s".format( + address.toString, + port) client.error(message) client.debug( s"$message\n${e.toString}\n${e.getStackTrace.mkString("\n")}") diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/components/HighlightingAdvisor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/components/HighlightingAdvisor.scala index dc1f0bcc546..f6853705459 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/components/HighlightingAdvisor.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/components/HighlightingAdvisor.scala @@ -117,8 +117,7 @@ class HighlightingAdvisor(project: Project) private def notify(title: String, message: String, notificationType: NotificationType) { - NotificationUtil - .builder(project, message) setNotificationType notificationType setTitle title setHandler { + NotificationUtil.builder(project, message) setNotificationType notificationType setTitle title setHandler { case "enable" => enabled = true case "disable" => enabled = false case _ => @@ -166,8 +165,9 @@ class HighlightingAdvisor(project: Project) override def 
consume(dataContext: DataContext): Unit = { CommonDataKeys.EDITOR_EVEN_IF_INACTIVE.getData(dataContext) match { case editor: EditorEx => - FileContentUtil - .reparseFiles(project, Seq(editor.getVirtualFile), true) + FileContentUtil.reparseFiles(project, + Seq(editor.getVirtualFile), + true) case _ => // do nothing } } @@ -192,8 +192,9 @@ class HighlightingAdvisor(project: Project) def getClickConsumer = ClickConsumer def getTooltipText = - "%s (click to %s, or press Ctrl+Shift+Alt+E)" - .format(status, if (enabled) "disable" else "enable") + "%s (click to %s, or press Ctrl+Shift+Alt+E)".format( + status, + if (enabled) "disable" else "enable") object ClickConsumer extends Consumer[MouseEvent] { def consume(t: MouseEvent) { diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/ScalaConsoleExecuteAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/ScalaConsoleExecuteAction.scala index 0eec8c506c9..0f34711c01f 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/ScalaConsoleExecuteAction.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/ScalaConsoleExecuteAction.scala @@ -51,8 +51,8 @@ class ScalaConsoleExecuteAction extends AnAction { // Process input and add to history extensions.inWriteAction { val range: TextRange = new TextRange(0, document.getTextLength) - editor.getSelectionModel - .setSelection(range.getStartOffset, range.getEndOffset) + editor.getSelectionModel.setSelection(range.getStartOffset, + range.getEndOffset) console.addToHistory(range, console.getConsoleEditor, true) model.addToHistory(text) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/ScalaLanguageConsole.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/ScalaLanguageConsole.scala index 38ad1d85bcc..9eba0a7fd8c 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/ScalaLanguageConsole.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/ScalaLanguageConsole.scala @@ -51,8 +51,9 @@ class ScalaLanguageConsole(project: Project, title: String) private[console] def textSent(text: String) { textBuffer.append(text) - scalaFile = ScalaPsiElementFactory - .createScalaFileFromText(textBuffer.toString() + ";\n1", project) + scalaFile = ScalaPsiElementFactory.createScalaFileFromText( + textBuffer.toString() + ";\n1", + project) val types = new mutable.HashMap[String, TextRange] val values = new mutable.HashMap[String, (TextRange, Boolean)] def addValue(name: String, @@ -63,8 +64,9 @@ class ScalaLanguageConsole(project: Project, title: String) val newText = if (r) "_" + StringUtil.repeatSymbol(' ', oldRange.getLength - 1) else StringUtil.repeatSymbol(' ', oldRange.getLength) - textBuffer - .replace(oldRange.getStartOffset, oldRange.getEndOffset, newText) + textBuffer.replace(oldRange.getStartOffset, + oldRange.getEndOffset, + newText) case None => } values.put(name, (range, replaceWithPlaceholder)) @@ -73,8 +75,9 @@ class ScalaLanguageConsole(project: Project, title: String) types.get(name) match { case Some(oldRange) => val newText = StringUtil.repeatSymbol(' ', oldRange.getLength) - textBuffer - .replace(oldRange.getStartOffset, oldRange.getEndOffset, newText) + textBuffer.replace(oldRange.getStartOffset, + oldRange.getEndOffset, + newText) case None => } types.put(name, range) @@ -101,8 +104,9 @@ class ScalaLanguageConsole(project: Project, title: String) case t: ScTypeAlias => addType(t.name, t.nameId.getTextRange) case _ => //do nothing } - scalaFile = 
ScalaPsiElementFactory - .createScalaFileFromText(textBuffer.toString() + ";\n1", project) + scalaFile = ScalaPsiElementFactory.createScalaFileFromText( + textBuffer.toString() + ";\n1", + project) getFile .asInstanceOf[ScalaFile] .setContext(scalaFile, scalaFile.getLastChild) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/SendSelectionToConsoleAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/SendSelectionToConsoleAction.scala index 4d69b971dd3..905dbcddec2 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/SendSelectionToConsoleAction.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/console/SendSelectionToConsoleAction.scala @@ -88,8 +88,8 @@ class SendSelectionToConsoleAction extends AnAction { extensions.inWriteAction { val range: TextRange = new TextRange(0, document.getTextLength) - consoleEditor.getSelectionModel - .setSelection(range.getStartOffset, range.getEndOffset) + consoleEditor.getSelectionModel.setSelection(range.getStartOffset, + range.getEndOffset) console.addToHistory(range, console.getConsoleEditor, true) controller.addToHistory(text) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/conversion/JavaToScala.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/conversion/JavaToScala.scala index 4a92f7bc5d9..8b97eb98210 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/conversion/JavaToScala.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/conversion/JavaToScala.scala @@ -506,8 +506,8 @@ object JavaToScala { return AnonymousClassExpression( convertPsiToIntermdeiate(n.getAnonymousClass, externalProperties)) } - val mtype = TypeConstruction - .createStringTypePresentation(n.getType, n.getProject) + val mtype = TypeConstruction.createStringTypePresentation(n.getType, + n.getProject) if (n.getArrayInitializer != null) { NewExpression(mtype, n.getArrayInitializer.getInitializers.map( diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/conversion/copy/JavaCopyPastePostProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/conversion/copy/JavaCopyPastePostProcessor.scala index 8f2b59f7142..625719f8255 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/conversion/copy/JavaCopyPastePostProcessor.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/conversion/copy/JavaCopyPastePostProcessor.scala @@ -110,8 +110,10 @@ class JavaCopyPastePostProcessor def getRefs: Seq[ReferenceData] = { val refs = { - val data = referenceProcessor - .collectTransferableData(file, editor, startOffsets, endOffsets) + val data = referenceProcessor.collectTransferableData(file, + editor, + startOffsets, + endOffsets) if (data.isEmpty) null else data.get(0).asInstanceOf[ReferenceTransferableData] } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/LocationLineManager.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/LocationLineManager.scala index ac094e0e5c5..883c9b855e3 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/LocationLineManager.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/LocationLineManager.scala @@ -49,8 +49,9 @@ trait LocationLineManager { self: ScalaPositionManager => def exactLineNumber(location: Location): Int = { checkAndUpdateCaches(location.declaringType()) - customizedLocationsCache - .getOrElse(location, ScalaPositionManager.checkedLineNumber(location)) + 
customizedLocationsCache.getOrElse( + location, + ScalaPositionManager.checkedLineNumber(location)) } def shouldSkip(location: Location): Boolean = { @@ -95,8 +96,9 @@ trait LocationLineManager { self: ScalaPositionManager => val key = (location.declaringType(), customLine) val old = lineToCustomizedLocationCache.getOrElse(key, Seq.empty) - lineToCustomizedLocationCache - .update(key, (old :+ location).sortBy(_.codeIndex())) + lineToCustomizedLocationCache.update( + key, + (old :+ location).sortBy(_.codeIndex())) } private def computeCustomizedLocationsFor(refType: ReferenceType): Unit = { diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ScalaFrameExtraVariablesProvider.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ScalaFrameExtraVariablesProvider.scala index cb817ee372a..a83f0bbf2cb 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ScalaFrameExtraVariablesProvider.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ScalaFrameExtraVariablesProvider.scala @@ -96,8 +96,10 @@ class ScalaFrameExtraVariablesProvider extends FrameExtraVariablesProvider { alreadyCollected: util.Set[String]) = { val initialCandidates = inReadAction { val completionProcessor = new CollectingProcessor(elem) - PsiTreeUtil - .treeWalkUp(completionProcessor, elem, null, ResolveState.initial) + PsiTreeUtil.treeWalkUp(completionProcessor, + elem, + null, + ResolveState.initial) completionProcessor.candidates .filter( srr => @@ -133,8 +135,9 @@ class ScalaFrameExtraVariablesProvider extends FrameExtraVariablesProvider { } val funDef = PsiTreeUtil.getParentOfType(place, classOf[ScFunctionDefinition]) - val lazyVal = PsiTreeUtil - .getParentOfType(place, classOf[ScPatternDefinition]) match { + val lazyVal = PsiTreeUtil.getParentOfType( + place, + classOf[ScPatternDefinition]) match { case null => null case LazyVal(lzy) => lzy case _ => null diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaCodeFragment.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaCodeFragment.scala index afc1fdc9dbb..d8fbdfd21e1 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaCodeFragment.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaCodeFragment.scala @@ -131,8 +131,10 @@ class ScalaCodeFragment(project: Project, text: String) extends { lastParent: PsiElement, place: PsiElement): Boolean = { for (qName <- imports) { - val imp = ScalaPsiElementFactory - .createImportFromTextWithContext("import _root_." + qName, this, this) + val imp = ScalaPsiElementFactory.createImportFromTextWithContext( + "import _root_." 
+ qName, + this, + this) if (!imp.processDeclarations(processor, state, lastParent, place)) return false } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaEvaluatorBuilderUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaEvaluatorBuilderUtil.scala index 3949236d7e0..77fdf4287df 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaEvaluatorBuilderUtil.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaEvaluatorBuilderUtil.scala @@ -205,8 +205,10 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { val transformed = NameTransformer.encode(fun.name) fun match { case ScalaPositionManager.InsideAsync(call) => - val containingFun = PsiTreeUtil - .getParentOfType(fun, classOf[ScFunctionDefinition], true) + val containingFun = PsiTreeUtil.getParentOfType( + fun, + classOf[ScFunctionDefinition], + true) if (containingFun != null && call.isAncestorOf(containingFun)) transformed else transformed + "$macro" @@ -287,8 +289,10 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { val qualText = qualOpt.fold("this")(_.getText) val exprText = s"($qualText).concat(_root_.java.lang.String.valueOf(${arguments.head.getText}))" - val expr = ScalaPsiElementFactory - .createExpressionWithContextFromText(exprText, ref.getContext, ref) + val expr = ScalaPsiElementFactory.createExpressionWithContextFromText( + exprText, + ref.getContext, + ref) return evaluatorFor(expr) } @@ -533,8 +537,10 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { else "" val exprText = s"_root_.scala.collection.Seq.newBuilder[$argTypeText]$argsText" - val newExpr = ScalaPsiElementFactory - .createExpressionWithContextFromText(exprText, context, context) + val newExpr = ScalaPsiElementFactory.createExpressionWithContextFromText( + exprText, + context, + context) evaluatorFor(newExpr) } if (exprsForP.length == 1) { @@ -893,8 +899,9 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { .headOption .getOrElse("$this") val text = s"new $clName($paramName).${call.getText}" - val expr = ScalaPsiElementFactory - .createExpressionFromText(text, call.getContext) + val expr = ScalaPsiElementFactory.createExpressionFromText( + text, + call.getContext) evaluatorFor(expr) case _ => val args: Seq[Evaluator] = @@ -1396,8 +1403,10 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { def fromLocalArgEvaluator(local: ScTypedDefinition): Evaluator = { val name = local.asInstanceOf[PsiNamedElement].name val elemAt = position.getElementAt - val ref = ScalaPsiElementFactory - .createExpressionWithContextFromText(name, elemAt, elemAt) + val ref = ScalaPsiElementFactory.createExpressionWithContextFromText( + name, + elemAt, + elemAt) val refEval = evaluatorFor(ref) if (local.isInstanceOf[ScObject]) { @@ -1412,8 +1421,10 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { def expressionFromTextEvaluator(string: String, context: PsiElement): Evaluator = { - val expr = ScalaPsiElementFactory - .createExpressionWithContextFromText(string, context.getContext, context) + val expr = ScalaPsiElementFactory.createExpressionWithContextFromText( + string, + context.getContext, + context) evaluatorFor(expr) } @@ -1457,8 +1468,9 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { evaluatorOpt.getOrElse(ScalaLiteralEvaluator(l)) case _ if l.isSymbol => val value = l.getValue.asInstanceOf[Symbol].name - val expr = ScalaPsiElementFactory - .createExpressionFromText(s"""Symbol("$value")""", 
l.getContext) + val expr = ScalaPsiElementFactory.createExpressionFromText( + s"""Symbol("$value")""", + l.getContext) evaluatorFor(expr) case _ => ScalaLiteralEvaluator(l) } @@ -1598,8 +1610,10 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { val operationText = operation.refName.dropRight(1) val argText = infix.getArgExpr.getText val exprText = s"$baseExprText = $baseExprText $operationText $argText" - val expr = ScalaPsiElementFactory - .createExpressionWithContextFromText(exprText, infix.getContext, infix) + val expr = ScalaPsiElementFactory.createExpressionWithContextFromText( + exprText, + infix.getContext, + infix) evaluatorFor(expr) } else { val equivCall = ScalaPsiElementFactory.createEquivMethodCall(infix) @@ -1623,8 +1637,10 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { def prefixExprEvaluator(p: ScPrefixExpr): Evaluator = { val newExprText = s"(${p.operand.getText}).unary_${p.operation.refName}" - val newExpr = ScalaPsiElementFactory - .createExpressionWithContextFromText(newExprText, p.getContext, p) + val newExpr = ScalaPsiElementFactory.createExpressionWithContextFromText( + newExprText, + p.getContext, + p) evaluatorFor(newExpr) } @@ -1644,8 +1660,10 @@ private[evaluation] trait ScalaEvaluatorBuilderUtil { val exprText = "_root_.scala.Tuple" + tuple.exprs.length + tuple.exprs.map(_.getText).mkString("(", ", ", ")") - val expr = ScalaPsiElementFactory - .createExpressionWithContextFromText(exprText, tuple.getContext, tuple) + val expr = ScalaPsiElementFactory.createExpressionWithContextFromText( + exprText, + tuple.getContext, + tuple) evaluatorFor(expr) } @@ -1897,8 +1915,9 @@ object ScalaEvaluatorBuilderUtil { @tailrec final def isStable(o: ScObject): Boolean = { - val context = PsiTreeUtil - .getParentOfType(o, classOf[ScTemplateDefinition], classOf[ScExpression]) + val context = PsiTreeUtil.getParentOfType(o, + classOf[ScTemplateDefinition], + classOf[ScExpression]) if (context == null) return true context match { case o: ScObject => isStable(o) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaEqEvaluator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaEqEvaluator.scala index 900f52e9098..e2c6a11e9e2 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaEqEvaluator.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaEqEvaluator.scala @@ -26,13 +26,15 @@ class ScalaEqEvaluator(left: Evaluator, right: Evaluator) extends Evaluator { case (v1: PrimitiveValue, v2: PrimitiveValue) if DebuggerUtils.isInteger(leftResult) && DebuggerUtils.isInteger(rightResult) => - DebuggerUtilsEx - .createValue(vm, "boolean", v1.longValue == v2.longValue) + DebuggerUtilsEx.createValue(vm, + "boolean", + v1.longValue == v2.longValue) case (v1: PrimitiveValue, v2: PrimitiveValue) if DebuggerUtils.isNumeric(leftResult) && DebuggerUtils.isNumeric(rightResult) => - DebuggerUtilsEx - .createValue(vm, "boolean", v1.doubleValue == v2.doubleValue) + DebuggerUtilsEx.createValue(vm, + "boolean", + v1.doubleValue == v2.doubleValue) case (v1: BooleanValue, v2: BooleanValue) => DebuggerUtilsEx.createValue(vm, "boolean", v1 == v2) case (v1: CharValue, v2: CharValue) => diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaInstanceofEvaluator.scala 
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaInstanceofEvaluator.scala index cfe177f4370..56471a9a42b 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaInstanceofEvaluator.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaInstanceofEvaluator.scala @@ -41,8 +41,9 @@ class ScalaInstanceofEvaluator(operandEvaluator: Evaluator, val classObject: ClassObjectReference = refType.classObject val classRefType: ClassType = classObject.referenceType.asInstanceOf[ClassType] - val method: Method = classRefType - .concreteMethodByName("isAssignableFrom", "(Ljava/lang/Class;)Z") + val method: Method = classRefType.concreteMethodByName( + "isAssignableFrom", + "(Ljava/lang/Class;)Z") val args: java.util.List[Object] = new util.LinkedList[Object] args.add(value.asInstanceOf[ObjectReference].referenceType.classObject) context.getDebugProcess.invokeMethod(context, classObject, method, args) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaMethodEvaluator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaMethodEvaluator.scala index c0cbde1b19e..6977709784e 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaMethodEvaluator.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaMethodEvaluator.scala @@ -50,8 +50,8 @@ case class ScalaMethodEvaluator( private def getOrUpdateMethod( referenceType: ReferenceType, findMethod: ReferenceType => Method): Option[Method] = { - jdiMethodsCache - .getOrElseUpdate(referenceType, Option(findMethod(referenceType))) + jdiMethodsCache.getOrElseUpdate(referenceType, + Option(findMethod(referenceType))) } def evaluate(context: EvaluationContextImpl): AnyRef = { @@ -86,8 +86,9 @@ case class ScalaMethodEvaluator( val referenceType: ReferenceType = obj match { case o: ObjectReference => val qualifierType = o.referenceType() - debugProcess - .findClass(context, qualifierType.name, qualifierType.classLoader) + debugProcess.findClass(context, + qualifierType.name, + qualifierType.classLoader) case obj: ClassType => debugProcess.findClass(context, obj.name, context.getClassLoader) case _ => @@ -129,8 +130,9 @@ case class ScalaMethodEvaluator( } if (jdiMethod == null && localMethod) { for (method <- sortedMethodCandidates if jdiMethod == null) { - mName = DebuggerUtilsEx - .methodName(referenceType.name, method.name(), sign) + mName = DebuggerUtilsEx.methodName(referenceType.name, + method.name(), + sign) jdiMethod = referenceType .asInstanceOf[ClassType] .concreteMethodByName(mName, signature.getName(debugProcess)) @@ -209,8 +211,9 @@ case class ScalaMethodEvaluator( case Some(tr) => val className: String = tr.getName(context.getDebugProcess) if (className != null) { - context.getDebugProcess - .findClass(context, className, context.getClassLoader) match { + context.getDebugProcess.findClass(context, + className, + context.getClassLoader) match { case c: ClassType => _refType = c case _ => _refType = referenceType.asInstanceOf[ClassType].superclass @@ -231,8 +234,9 @@ case class ScalaMethodEvaluator( case Some(tr) => val className: String = tr.getName(context.getDebugProcess) if (className != null) { - context.getDebugProcess - .findClass(context, className, context.getClassLoader) match { + context.getDebugProcess.findClass(context, + className, + 
context.getClassLoader) match { case c: ClassType => return debugProcess.invokeMethod( context, diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/smartStepInto/ScalaSmartStepIntoHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/smartStepInto/ScalaSmartStepIntoHandler.scala index 3704771d147..52640ab2cc4 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/smartStepInto/ScalaSmartStepIntoHandler.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/smartStepInto/ScalaSmartStepIntoHandler.scala @@ -98,8 +98,9 @@ class ScalaSmartStepIntoHandler extends JvmSmartStepIntoHandler { val scalaFilter = methodTarget.getMethod match { case f @ (_: ScMethodLike | _: FakeAnonymousClassConstructor) if stepTarget.needsBreakpointRequest() => - ScalaBreakpointMethodFilter - .from(f, stepTarget.getCallingExpressionLines) + ScalaBreakpointMethodFilter.from( + f, + stepTarget.getCallingExpressionLines) case fun: ScMethodLike => Some( new ScalaMethodFilter(fun, stepTarget.getCallingExpressionLines)) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ui/NonStrictCollectionsRenderer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ui/NonStrictCollectionsRenderer.scala index 8046decd814..6180a3753a3 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ui/NonStrictCollectionsRenderer.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ui/NonStrictCollectionsRenderer.scala @@ -75,8 +75,9 @@ class NonStrictCollectionsRenderer extends NodeRendererImpl { val suitableMethods = objectRef.referenceType().methodsByName(methodName, "()" + signature) if (suitableMethods.size() > 0) { - companionObject - .invokeEmptyArgsMethod(objectRef, suitableMethods get 0, context) + companionObject.invokeEmptyArgsMethod(objectRef, + suitableMethods get 0, + context) } else { MethodNotFound() } @@ -248,8 +249,7 @@ object NonStrictCollectionsRenderer { method: Method, context: EvaluationContext): SimpleMethodInvocationResult[_] = { try { - context.getDebugProcess - .invokeMethod(context, obj, method, EMPTY_ARGS) match { + context.getDebugProcess.invokeMethod(context, obj, method, EMPTY_ARGS) match { case intValue: IntegerValue => Success[Int](intValue.intValue()) case boolValue: BooleanValue => Success[Boolean](boolValue.booleanValue()) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ui/ScalaCollectionRenderer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ui/ScalaCollectionRenderer.scala index 49c77ddb12d..dd0e8e47cfc 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ui/ScalaCollectionRenderer.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/debugger/ui/ScalaCollectionRenderer.scala @@ -248,8 +248,9 @@ object ScalaCollectionRenderer { evaluateChildren(evaluationContext, parentDescriptor) val defaultChildrenRenderer: ChildrenRenderer = DebugProcessImpl.getDefaultRenderer(value.`type`) - defaultChildrenRenderer - .isExpandable(children, evaluationContext, parentDescriptor) + defaultChildrenRenderer.isExpandable(children, + evaluationContext, + parentDescriptor) } catch { case e: EvaluateException => true @@ -297,8 +298,9 @@ object ScalaCollectionRenderer { !renderer.isApplicable(childrenValue.`type`)) { renderer = DebugProcessImpl.getDefaultRenderer( if (childrenValue != null) childrenValue.`type` else null) - ExpressionChildrenRenderer - 
.setPreferableChildrenRenderer(parentDescriptor, renderer) + ExpressionChildrenRenderer.setPreferableChildrenRenderer( + parentDescriptor, + renderer) } renderer } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/backspaceHandler/ScalaBackspaceHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/backspaceHandler/ScalaBackspaceHandler.scala index 093ca275c19..85a2547f682 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/backspaceHandler/ScalaBackspaceHandler.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/backspaceHandler/ScalaBackspaceHandler.scala @@ -81,8 +81,8 @@ class ScalaBackspaceHandler extends BackspaceHandlerDelegate { element.getNextSibling != null && element.getNextSibling.getNode.getElementType == ScalaXmlTokenTypes.XML_ATTRIBUTE_VALUE_END_DELIMITER) { extensions.inWriteAction { - editor.getDocument - .deleteString(element.getTextOffset + 1, element.getTextOffset + 2) + editor.getDocument.deleteString(element.getTextOffset + 1, + element.getTextOffset + 2) PsiDocumentManager .getInstance(file.getProject) .commitDocument(editor.getDocument) @@ -103,8 +103,8 @@ class ScalaBackspaceHandler extends BackspaceHandlerDelegate { def correctMultilineString(closingQuotesOffset: Int) { extensions.inWriteAction { - editor.getDocument - .deleteString(closingQuotesOffset, closingQuotesOffset + 3) + editor.getDocument.deleteString(closingQuotesOffset, + closingQuotesOffset + 3) // editor.getCaretModel.moveCaretRelatively(-1, 0, false, false, false) //http://youtrack.jetbrains.com/issue/SCL-6490 PsiDocumentManager .getInstance(file.getProject) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/documentationProvider/CreateScalaDocStubAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/documentationProvider/CreateScalaDocStubAction.scala index a944ab90ac7..47f0a53772f 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/documentationProvider/CreateScalaDocStubAction.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/documentationProvider/CreateScalaDocStubAction.scala @@ -44,8 +44,9 @@ class CreateScalaDocStubAction val editor = CommonDataKeys.EDITOR.getData(context) if (editor == null) return - val file = PsiUtilBase - .getPsiFileInEditor(editor, CommonDataKeys.PROJECT.getData(context)) + val file = PsiUtilBase.getPsiFileInEditor( + editor, + CommonDataKeys.PROJECT.getData(context)) if (file.getLanguage != ScalaFileType.SCALA_LANGUAGE) return file findElementAt editor.getCaretModel.getOffset match { diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/documentationProvider/ScalaDocumentationProvider.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/documentationProvider/ScalaDocumentationProvider.scala index 9c1a2f9f4ed..5722083d074 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/documentationProvider/ScalaDocumentationProvider.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/documentationProvider/ScalaDocumentationProvider.scala @@ -414,8 +414,7 @@ object ScalaDocumentationProvider { member match { case named: ScNamedElement => - ScalaPsiUtil - .superValsSignatures(named, withSelfType = false) map { + ScalaPsiUtil.superValsSignatures(named, withSelfType = false) map { case sig => sig.namedElement } foreach { case od: ScDocCommentOwner => tc += od @@ -541,8 +540,9 @@ object ScalaDocumentationProvider { for (param <- owner.parameters) 
{ if (inheritedParams contains param.name) { val paramText = inheritedParams.get(param.name).get.getText - buffer append leadingAsterisks append paramText - .substring(0, paramText.lastIndexOf("\n") + 1) + buffer append leadingAsterisks append paramText.substring( + 0, + paramText.lastIndexOf("\n") + 1) } else { buffer append leadingAsterisks append PARAM_TAG append " " append param.name append "\n" } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/enterHandler/MultilineStringEnterHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/enterHandler/MultilineStringEnterHandler.scala index cb2d390b546..f2438cb6251 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/enterHandler/MultilineStringEnterHandler.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/enterHandler/MultilineStringEnterHandler.scala @@ -49,15 +49,16 @@ class MultilineStringEnterHandler extends EnterHandlerDelegateAdapter { whiteSpaceAfterCaret = text.substring(caretOffset).takeWhile(c => c == ' ' || c == '\t') - document - .deleteString(caretOffset, caretOffset + whiteSpaceAfterCaret.length) + document.deleteString(caretOffset, + caretOffset + whiteSpaceAfterCaret.length) if ((ch1 != '(' || ch2 != ')') && (ch1 != '{' || ch2 != '}') || !CodeInsightSettings.getInstance.SMART_INDENT_ON_ENTER) return Result.Continue - originalHandler - .execute(editor, editor.getCaretModel.getCurrentCaret, dataContext) + originalHandler.execute(editor, + editor.getCaretModel.getCurrentCaret, + dataContext) Result.DefaultForceIndent } @@ -101,8 +102,9 @@ class MultilineStringEnterHandler extends EnterHandlerDelegateAdapter { def getSmartSpaces(count: Int) = if (useTabs) { - StringUtil.repeat("\t", count / tabSize) + StringUtil - .repeat(" ", count % tabSize) + StringUtil.repeat("\t", count / tabSize) + StringUtil.repeat( + " ", + count % tabSize) } else { StringUtil.repeat(" ", count) } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/importOptimizer/ScalaImportOptimizer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/importOptimizer/ScalaImportOptimizer.scala index d3055a0b276..813a420e0d4 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/importOptimizer/ScalaImportOptimizer.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/importOptimizer/ScalaImportOptimizer.scala @@ -248,8 +248,9 @@ class ScalaImportOptimizer extends ImportOptimizer { new TextRange(start, end) } else range - document - .replaceString(newRange.getStartOffset, newRange.getEndOffset, text) + document.replaceString(newRange.getStartOffset, + newRange.getEndOffset, + text) } def collectImportRanges(holder: ScImportsHolder, @@ -781,8 +782,10 @@ object ScalaImportOptimizer { def namesAtRangeStart(imp: ScImportStmt): Set[String] = { val refText = "someIdentifier" - val reference = ScalaPsiElementFactory - .createReferenceFromText(refText, imp.getContext, imp) + val reference = ScalaPsiElementFactory.createReferenceFromText( + refText, + imp.getContext, + imp) val rangeNamesSet = new mutable.HashSet[String]() def addName(name: String): Unit = rangeNamesSet += ScalaNamesUtil.changeKeyword(name) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/selectioner/ScalaCodeBlockSelectioner.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/selectioner/ScalaCodeBlockSelectioner.scala index 47687d17733..bf2c5436abb 100644 --- 
a/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/selectioner/ScalaCodeBlockSelectioner.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/editor/selectioner/ScalaCodeBlockSelectioner.scala @@ -35,8 +35,9 @@ class ScalaCodeBlockSelectioner extends ExtendWordSelectionHandlerBase { val end = lastChild.getTextRange.getStartOffset if (start >= end) new util.ArrayList[TextRange]() // '{ }' case else - ExtendWordSelectionHandlerBase - .expandToWholeLine(editorText, new TextRange(start, end)) + ExtendWordSelectionHandlerBase.expandToWholeLine( + editorText, + new TextRange(start, end)) } else new util.ArrayList[TextRange] } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/extensions/package.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/extensions/package.scala index ac00e585752..05e377eeed6 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/extensions/package.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/extensions/package.scala @@ -482,8 +482,10 @@ package object extensions { } catching(classOf[Exception]).withTry { - progressManager - .runProcessWithProgressSynchronously(computable, title, false, null) + progressManager.runProcessWithProgressSynchronously(computable, + title, + false, + null) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/NonMemberMethodUsagesSearcher.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/NonMemberMethodUsagesSearcher.scala index c2677325b45..fab2490e70a 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/NonMemberMethodUsagesSearcher.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/NonMemberMethodUsagesSearcher.scala @@ -30,8 +30,11 @@ class NonMemberMethodUsagesSearcher consumer.process(t) } } - ReferencesSearch - .searchOptimized(method, searchScope, false, collector, newConsumer) + ReferencesSearch.searchOptimized(method, + searchScope, + false, + collector, + newConsumer) } } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/factory/ScalaFindUsagesHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/factory/ScalaFindUsagesHandler.scala index d7b72f7f519..04719519450 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/factory/ScalaFindUsagesHandler.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/factory/ScalaFindUsagesHandler.scala @@ -157,8 +157,9 @@ class ScalaFindUsagesHandler(element: PsiElement, isSingleFile, this) case _ => - super - .getFindUsagesDialog(isSingleFile, toShowInNewTab, mustOpenInNewTab) + super.getFindUsagesDialog(isSingleFile, + toShowInNewTab, + mustOpenInNewTab) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/parameters/ConstructorParamsInConstructorPatternSearcher.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/parameters/ConstructorParamsInConstructorPatternSearcher.scala index 5a5b5594540..ad487e60700 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/parameters/ConstructorParamsInConstructorPatternSearcher.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/findUsages/parameters/ConstructorParamsInConstructorPatternSearcher.scala @@ -70,8 +70,7 @@ class ConstructorParamsInConstructorPatternSearcher inReadAction { if (!param.isValid) return None - PsiTreeUtil - .getParentOfType(param, classOf[ScPrimaryConstructor]) match { + 
         case pc @ ScPrimaryConstructor.ofClass(cls) if cls.isCase =>
           pc.parameters.indexOf(param) match {
             case -1 => None
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/highlighter/usages/ScalaHighlightUsagesHandlerFactory.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/highlighter/usages/ScalaHighlightUsagesHandlerFactory.scala
index 12997c4de96..f17c490f580 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/highlighter/usages/ScalaHighlightUsagesHandlerFactory.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/highlighter/usages/ScalaHighlightUsagesHandlerFactory.scala
@@ -30,8 +30,9 @@ class ScalaHighlightUsagesHandlerFactory
       editor: Editor,
       file: PsiFile): HighlightUsagesHandlerBase[_ <: PsiElement] = {
     if (!file.isInstanceOf[ScalaFile]) return null
-    val offset = TargetElementUtil
-      .adjustOffset(file, editor.getDocument, editor.getCaretModel.getOffset)
+    val offset = TargetElementUtil.adjustOffset(file,
+                                                editor.getDocument,
+                                                editor.getCaretModel.getOffset)
     val element: PsiElement = file.findElementAt(offset)
     if (element == null || element.getNode == null) return null
     element.getNode.getElementType match {
@@ -54,8 +55,7 @@ class ScalaHighlightUsagesHandlerFactory
           case _ =>
         }
       case ScalaTokenTypes.kVAL =>
-        PsiTreeUtil
-          .getParentOfType(element, classOf[ScPatternDefinition]) match {
+        PsiTreeUtil.getParentOfType(element, classOf[ScPatternDefinition]) match {
           case pattern @ ScPatternDefinition.expr(expr)
               if pattern.pList.allPatternsSimple &&
                 pattern.pList.patterns.length == 1 =>
@@ -66,8 +66,7 @@ class ScalaHighlightUsagesHandlerFactory
           case _ =>
         }
       case ScalaTokenTypes.kVAR =>
-        PsiTreeUtil
-          .getParentOfType(element, classOf[ScVariableDefinition]) match {
+        PsiTreeUtil.getParentOfType(element, classOf[ScVariableDefinition]) match {
           case pattern @ ScPatternDefinition.expr(expr)
               if pattern.pList.allPatternsSimple &&
                 pattern.pList.patterns.length == 1 =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/injection/ScalaInjectedStringLiteralManipulator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/injection/ScalaInjectedStringLiteralManipulator.scala
index 869f3453c7c..a84ea651212 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/injection/ScalaInjectedStringLiteralManipulator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/injection/ScalaInjectedStringLiteralManipulator.scala
@@ -47,8 +47,9 @@ class ScalaInjectedStringLiteralManipulator
             "cannot handle content change")
       }
       case str if str.isString =>
-        val newExpr = ScalaPsiElementFactory
-          .createExpressionFromText(newText, str.getManager)
+        val newExpr = ScalaPsiElementFactory.createExpressionFromText(
+          newText,
+          str.getManager)
         val firstChild = str.getFirstChild
         val newElement = newExpr.getFirstChild
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/TokenSets.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/TokenSets.scala
index a4dce13b84f..6de75206a08 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/TokenSets.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/TokenSets.scala
@@ -138,8 +138,8 @@ object TokenSets {
   val VARIABLES = TokenSet.create(ScalaElementTypes.VARIABLE_DECLARATION,
                                   ScalaElementTypes.VARIABLE_DEFINITION)
 
-  val TEMPLATE_PARENTS = TokenSet
-    .create(ScalaElementTypes.CLASS_PARENTS, ScalaElementTypes.TRAIT_PARENTS)
+  val TEMPLATE_PARENTS =
+    TokenSet.create(ScalaElementTypes.CLASS_PARENTS,
+                    ScalaElementTypes.TRAIT_PARENTS)
 
   val MEMBERS = TokenSet.orSet(
     TokenSet.orSet(
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/SameSignatureCallParametersProvider.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/SameSignatureCallParametersProvider.scala
index 5b6dab41fc7..44e2e30ccba 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/SameSignatureCallParametersProvider.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/SameSignatureCallParametersProvider.scala
@@ -151,8 +151,7 @@ class SameSignatureCallParametersProvider extends ScalaCompletionContributor {
     val position = positionFromParameters(parameters)
     val elementType = position.getNode.getElementType
     if (elementType != ScalaTokenTypes.tIDENTIFIER) return
-    PsiTreeUtil
-      .getContextOfType(position, classOf[ScTemplateDefinition]) match {
+    PsiTreeUtil.getContextOfType(position, classOf[ScTemplateDefinition]) match {
       case c: ScClass =>
         val args =
           PsiTreeUtil.getContextOfType(position, classOf[ScArgumentExprList])
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaAfterNewCompletionUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaAfterNewCompletionUtil.scala
index 196fb10f53a..2adb270dbcc 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaAfterNewCompletionUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaAfterNewCompletionUtil.scala
@@ -62,8 +62,9 @@ object ScalaAfterNewCompletionUtil {
     val data = if (isAfter) {
       val element = position
-      val newExpr: ScNewTemplateDefinition = PsiTreeUtil
-        .getContextOfType(element, classOf[ScNewTemplateDefinition])
+      val newExpr: ScNewTemplateDefinition = PsiTreeUtil.getContextOfType(
+        element,
+        classOf[ScNewTemplateDefinition])
       newExpr.expectedTypes().map {
         case ScAbstractType(_, lower, upper) => upper
         case tp => tp
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaAotCompletionContributor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaAotCompletionContributor.scala
index 924437fdd27..c1472dfaf57 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaAotCompletionContributor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaAotCompletionContributor.scala
@@ -121,8 +121,10 @@ private object ScalaAotCompletionContributor {
     if (s.length == 0) s
     else s.substring(0, 1).toUpperCase + s.substring(1)
 
   def createParameterFrom(text: String, original: PsiElement): ScParameter = {
-    val clauses = ScalaPsiElementFactory
-      .createParamClausesWithContext(s"($text)", original.getContext, original)
+    val clauses = ScalaPsiElementFactory.createParamClausesWithContext(
+      s"($text)",
+      original.getContext,
+      original)
     clauses.params.head
   }
@@ -145,11 +147,13 @@ private class MyConsumer(prefix: String,
     val name = suggestNameFor(prefix, element.getLookupString)
 
-    val renderingDecorator = LookupElementDecorator
-      .withRenderer(element, new MyElementRenderer(name, typed))
+    val renderingDecorator = LookupElementDecorator.withRenderer(
+      element,
+      new MyElementRenderer(name, typed))
 
-    val insertionDecorator = LookupElementDecorator
-      .withInsertHandler(renderingDecorator, new MyInsertHandler(name, typed))
+    val insertionDecorator =
+      LookupElementDecorator.withInsertHandler(
+        renderingDecorator,
+        new MyInsertHandler(name, typed))
 
     if (typed) {
       resultSet.consume(insertionDecorator)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaClassNameCompletionContributor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaClassNameCompletionContributor.scala
index c4d694bf507..95aa292b3d8 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaClassNameCompletionContributor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaClassNameCompletionContributor.scala
@@ -110,8 +110,9 @@ object ScalaClassNameCompletionContributor {
     val expectedTypesAfterNew: Array[ScType] =
       if (afterNewPattern.accepts(dummyPosition, context)) {
         val element = dummyPosition
-        val newExpr = PsiTreeUtil
-          .getContextOfType(element, classOf[ScNewTemplateDefinition])
+        val newExpr = PsiTreeUtil.getContextOfType(
+          element,
+          classOf[ScNewTemplateDefinition])
         //todo: probably we need to remove all abstracts here according to variance
         newExpr.expectedTypes().map {
           case ScAbstractType(_, lower, upper) => upper
@@ -138,12 +139,14 @@ object ScalaClassNameCompletionContributor {
     val lookingForAnnotations: Boolean =
       psiElement.afterLeaf("@").accepts(position)
     val isInImport =
-      ScalaPsiUtil
-        .getContextOfType(position, false, classOf[ScImportStmt]) != null
-    val stableRefElement = ScalaPsiUtil
-      .getContextOfType(position, false, classOf[ScStableCodeReferenceElement])
-    val refElement = ScalaPsiUtil
-      .getContextOfType(position, false, classOf[ScReferenceElement])
+      ScalaPsiUtil.getContextOfType(position, false, classOf[ScImportStmt]) != null
+    val stableRefElement = ScalaPsiUtil.getContextOfType(
+      position,
+      false,
+      classOf[ScStableCodeReferenceElement])
+    val refElement = ScalaPsiUtil.getContextOfType(position,
+                                                   false,
+                                                   classOf[ScReferenceElement])
     val onlyClasses = stableRefElement != null &&
       !stableRefElement.getContext.isInstanceOf[ScConstructorPattern]
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaOverrideContributor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaOverrideContributor.scala
index 1fdaffa310a..a44761f400d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaOverrideContributor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaOverrideContributor.scala
@@ -87,8 +87,9 @@ class ScalaOverrideContributor extends ScalaCompletionContributor {
       parameters: CompletionParameters): Unit = {
     val position = positionFromParameters(parameters)
 
-    val clazz = PsiTreeUtil
-      .getParentOfType(position, classOf[ScTemplateDefinition], false)
+    val clazz = PsiTreeUtil.getParentOfType(position,
+                                            classOf[ScTemplateDefinition],
+                                            false)
     if (clazz == null) return
 
     val classMembers =
@@ -234,8 +235,9 @@ class ScalaOverrideContributor extends ScalaCompletionContributor {
             ICON_FLAG_VISIBILITY | ICON_FLAG_READ_STATUS))
         .withInsertHandler(insertionHandler(mm))
 
-      val renderingDecorator = LookupElementDecorator
-        .withRenderer(lookupItem, new MyElementRenderer(mm))
+      val renderingDecorator =
+        LookupElementDecorator.withRenderer(lookupItem,
+                                            new MyElementRenderer(mm))
       resultSet.consume(renderingDecorator)
     case _ =>
   }
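When even the receiver-plus-call head no longer fits on the definition line, the penalty pushes the break to just after the `=` rather than before the select, as in the TEMPLATE_PARENTS and insertionDecorator hunks above. A small compilable sketch of that layout (names are stand-ins, not from the patch):

    object AssignmentBreakExample {
      // Rather than `val templateParents = Seq\n  .concat(...)`, the whole
      // call drops below the `=` and the arguments break one per line.
      val templateParents =
        Seq.concat(List("CLASS_PARENTS"),
                   List("TRAIT_PARENTS"))
    }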
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaSmartCompletionContributor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaSmartCompletionContributor.scala
index 1a621e89892..77b59a59057 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaSmartCompletionContributor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/ScalaSmartCompletionContributor.scala
@@ -373,8 +373,7 @@ class ScalaSmartCompletionContributor extends ScalaCompletionContributor {
         typez.foreach {
           case ScParameterizedType(tp, Seq(arg)) if !elementAdded =>
-            ScType
-              .extractClass(tp, Some(place.getProject)) match {
+            ScType.extractClass(tp, Some(place.getProject)) match {
               case Some(clazz)
                   if clazz.qualifiedName == "scala.Option" ||
                     clazz.qualifiedName == "scala.Some" =>
@@ -896,8 +895,10 @@ class ScalaSmartCompletionContributor extends ScalaCompletionContributor {
       result: CompletionResultSet) {
     val element = positionFromParameters(parameters)
 
-    val refElement = ScalaPsiUtil
-      .getContextOfType(element, false, classOf[ScReferenceElement])
+    val refElement =
+      ScalaPsiUtil.getContextOfType(element,
+                                    false,
+                                    classOf[ScReferenceElement])
 
     val renamesMap = new mutable.HashMap[String, (String, PsiNamedElement)]()
@@ -920,8 +921,9 @@ class ScalaSmartCompletionContributor extends ScalaCompletionContributor {
     }
     val addedClasses = new mutable.HashSet[String]
-    val newExpr = PsiTreeUtil
-      .getContextOfType(element, classOf[ScNewTemplateDefinition])
+    val newExpr =
+      PsiTreeUtil.getContextOfType(element,
+                                   classOf[ScNewTemplateDefinition])
     val types: Array[ScType] = newExpr.expectedTypes().map {
       case ScAbstractType(_, lower, upper) => upper
       case tp => tp
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/filters/expression/YieldFilter.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/filters/expression/YieldFilter.scala
index 3c5c1946445..ed90cc4075f 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/filters/expression/YieldFilter.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/filters/expression/YieldFilter.scala
@@ -40,10 +40,12 @@ class YieldFilter extends ElementFilter {
         if (leafText(i, context) == "yield") return false
         for (child <- parent.getParent.getNode.getChildren(null)
              if child.getElementType == ScalaTokenTypes.kYIELD) return false
-        return ScalaCompletionUtil
-          .checkAnyWith(parent.getParent, "yield true", context.getManager) ||
-        ScalaCompletionUtil
-          .checkReplace(parent.getParent, "yield", context.getManager)
+        return ScalaCompletionUtil.checkAnyWith(parent.getParent,
+                                                "yield true",
+                                                context.getManager) ||
+          ScalaCompletionUtil.checkReplace(parent.getParent,
+                                           "yield",
+                                           context.getManager)
       }
     }
     false
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/handlers/ScalaConstructorInsertHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/handlers/ScalaConstructorInsertHandler.scala
index e6c096737e6..dcab043840a 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/handlers/ScalaConstructorInsertHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/handlers/ScalaConstructorInsertHandler.scala
@@ -115,8 +115,9 @@ class ScalaConstructorInsertHandler extends InsertHandler[LookupElement] {
           .commitDocument(document)
         val file = context.getFile
         val element = file.findElementAt(endOffset - 1)
-        val newT = PsiTreeUtil
-          .getParentOfType(element, classOf[ScNewTemplateDefinition])
+        val newT = PsiTreeUtil.getParentOfType(
+          element,
+          classOf[ScNewTemplateDefinition])
         if (newT != null) {
           newT.extendsBlock.templateParents match {
             case Some(tp: ScTemplateParents) =>
@@ -138,8 +139,9 @@ class ScalaConstructorInsertHandler extends InsertHandler[LookupElement] {
             val newRefText =
               clazz.qualifiedName.split('.').takeRight(2).mkString(".")
             val newRef =
-              ScalaPsiElementFactory
-                .createReferenceFromText(newRefText, clazz.getManager)
+              ScalaPsiElementFactory.createReferenceFromText(
+                newRefText,
+                clazz.getManager)
             val replaced = ref
               .replace(newRef)
               .asInstanceOf[ScStableCodeReferenceElement]
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/handlers/ScalaInsertHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/handlers/ScalaInsertHandler.scala
index 3e8e4008342..9bf124bb6fb 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/handlers/ScalaInsertHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/handlers/ScalaInsertHandler.scala
@@ -100,8 +100,9 @@ class ScalaInsertHandler extends InsertHandler[LookupElement] {
           val (startOffset, _) = res.get
           val tailOffset = context.getTailOffset
           document.insertString(tailOffset, "}")
-          document
-            .insertString(startOffset + literal.getTextRange.getStartOffset, "{")
+          document.insertString(
+            startOffset + literal.getTextRange.getStartOffset,
+            "{")
           context.commitDocument()
           (startOffset + 1, tailOffset - startOffset)
         } else (contextStartOffset, context.getTailOffset - contextStartOffset)
@@ -129,8 +130,9 @@ class ScalaInsertHandler extends InsertHandler[LookupElement] {
             item.getAllLookupStrings.size() > 1 =>
         val ref = elem.getParent.asInstanceOf[ScReferenceExpression]
         val newRefText = ref.getText
-        val newRef = ScalaPsiElementFactory
-          .createExpressionFromText(newRefText, ref.getManager)
+        val newRef = ScalaPsiElementFactory.createExpressionFromText(
+          newRefText,
+          ref.getManager)
         ref.getParent.replace(newRef).getFirstChild
       case elem => elem
     }
@@ -408,8 +410,9 @@ class ScalaInsertHandler extends InsertHandler[LookupElement] {
             val blockEndOffset = block.getTextRange.getEndOffset
             val blockStartOffset = block.getTextRange.getStartOffset
             document.replaceString(blockEndOffset - 1, blockEndOffset, "")
-            document
-              .replaceString(blockStartOffset, blockStartOffset + 1, "")
+            document.replaceString(blockStartOffset,
+                                   blockStartOffset + 1,
+                                   "")
             item.isInSimpleStringNoBraces = true
           case _ =>
         }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/lookups/LookupElementManager.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/lookups/LookupElementManager.scala
index 605156debd7..e1c273ea673 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/lookups/LookupElementManager.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/lookups/LookupElementManager.scala
@@ -55,8 +55,7 @@ object LookupElementManager {
     qualifierType match {
       case _ if !isPredef && !usedImportForElement =>
-        ScType
-          .extractDesignated(qualifierType, withoutAliases = false) match {
+        ScType.extractDesignated(qualifierType, withoutAliases = false) match {
           case Some((named, _)) =>
             val clazz: Option[PsiClass] = named match {
               case cl: PsiClass => Some(cl)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/lookups/ScalaLookupItem.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/lookups/ScalaLookupItem.scala
index 6685daf8cb4..2972e06db6d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/lookups/ScalaLookupItem.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/lookups/ScalaLookupItem.scala
@@ -299,8 +299,7 @@ class ScalaLookupItem(val element: PsiNamedElement,
                                  false)
     val useFullyQualifiedName =
       PsiTreeUtil.getParentOfType(ref, classOf[ScImportStmt]) != null &&
-        PsiTreeUtil
-          .getParentOfType(ref, classOf[ScImportSelectors]) == null //do not complete in sel
+        PsiTreeUtil.getParentOfType(ref, classOf[ScImportSelectors]) == null //do not complete in sel
     if (ref == null) return
     while (ref.getParent != null &&
            ref.getParent.isInstanceOf[ScReferenceElement] &&
@@ -326,15 +325,15 @@ class ScalaLookupItem(val element: PsiNamedElement,
               val parts = cl.qualifiedName.split('.')
               if (parts.length > 1) {
                 val newRefText = parts.takeRight(2).mkString(".")
-                ScalaPsiElementFactory
-                  .createReferenceFromText(newRefText, ref.getManager)
+                ScalaPsiElementFactory.createReferenceFromText(newRefText,
+                                                               ref.getManager)
               } else {
                 ref.createReplacingElementWithClassName(useFullyQualifiedName, cl)
               }
             case _ =>
-              ref
-                .createReplacingElementWithClassName(useFullyQualifiedName, cl)
+              ref.createReplacingElementWithClassName(useFullyQualifiedName,
+                                                      cl)
           }
           ref.getNode.getTreeParent.replaceChild(ref.getNode, newRef.getNode)
           newRef.bindToElement(cl.element)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/postfix/templates/selector/ScalaPostfixTemplatePsiInfo.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/postfix/templates/selector/ScalaPostfixTemplatePsiInfo.scala
index 9555cd57630..1a3604f547b 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/postfix/templates/selector/ScalaPostfixTemplatePsiInfo.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/completion/postfix/templates/selector/ScalaPostfixTemplatePsiInfo.scala
@@ -61,6 +61,7 @@ class ScalaPostfixTemplatePsiInfo extends PostfixTemplatePsiInfo {
   override def createExpression(context: PsiElement,
                                 prefix: String,
                                 suffix: String): PsiElement =
-    ScalaPsiElementFactory
-      .createExpressionFromText(prefix + context.getText + suffix, context)
+    ScalaPsiElementFactory.createExpressionFromText(
+      prefix + context.getText + suffix,
+      context)
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/formatting/processors/ScalaSpacingProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/formatting/processors/ScalaSpacingProcessor.scala
index 576d9da5f4f..7b453a36d9e 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/formatting/processors/ScalaSpacingProcessor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/formatting/processors/ScalaSpacingProcessor.scala
@@ -693,8 +693,11 @@ object ScalaSpacingProcessor extends ScalaTokenTypes {
     if (rightPsi.isInstanceOf[ScPackaging]) {
       if (leftPsi.isInstanceOf[ScStableCodeReferenceElement] ||
           leftElementType == tLBRACE)
-        return Spacing
-          .createSpacing(0, 0, 1, keepLineBreaks, keepBlankLinesInCode)
+        return Spacing.createSpacing(0,
+                                     0,
+                                     1,
+                                     keepLineBreaks,
+                                     keepBlankLinesInCode)
       else
         return Spacing.createSpacing(0,
                                      0,
@@ -741,8 +744,11 @@ object ScalaSpacingProcessor extends ScalaTokenTypes {
     if (leftPsi.isInstanceOf[ScImportStmt] ||
         rightPsi.isInstanceOf[ScImportStmt]) {
-      return Spacing
-        .createSpacing(0, 0, 1, keepLineBreaks, keepBlankLinesInDeclarations)
+      return Spacing.createSpacing(0,
+                                   0,
+                                   1,
+                                   keepLineBreaks,
+                                   keepBlankLinesInDeclarations)
     }
 
     if (leftPsi.isInstanceOf[ScTypeDefinition]) {
@@ -842,8 +848,11 @@ object ScalaSpacingProcessor extends ScalaTokenTypes {
           return if (scalaSettings.SPACES_IN_IMPORTS) WITH_SPACING
           else WITHOUT_SPACING
         case _ =>
-          return Spacing
-            .createSpacing(0, 0, 0, keepLineBreaks, keepBlankLinesBeforeRBrace)
+          return Spacing.createSpacing(0,
+                                       0,
+                                       0,
+                                       keepLineBreaks,
+                                       keepBlankLinesBeforeRBrace)
       }
     }
@@ -941,8 +950,11 @@ object ScalaSpacingProcessor extends ScalaTokenTypes {
           return if (scalaSettings.SPACES_IN_IMPORTS) WITH_SPACING
           else WITHOUT_SPACING
         case _ =>
-          return Spacing
-            .createSpacing(0, 0, 0, keepLineBreaks, keepBlankLinesBeforeRBrace)
+          return Spacing.createSpacing(0,
+                                       0,
+                                       0,
+                                       keepLineBreaks,
+                                       keepBlankLinesBeforeRBrace)
       }
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/CommonUtils.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/CommonUtils.scala
index 5c6c1f004d0..0effa98ae2c 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/CommonUtils.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/CommonUtils.scala
@@ -34,8 +34,9 @@ object CommonUtils {
           if (!Pattern.parse(builder)) builder.error("Wrong pattern")
           else if (builder.getTokenType != ScalaTokenTypes.tRBRACE) {
             builder.error("'}' is expected")
-            ParserUtils
-              .parseLoopUntilRBrace(builder, () => (), braceReported = true)
+            ParserUtils.parseLoopUntilRBrace(builder,
+                                             () => (),
+                                             braceReported = true)
           } else builder.advanceLexer()
         }
       } else if (!BlockExpr.parse(builder)) {
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/CompilationUnit.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/CompilationUnit.scala
index 945f16ade59..6d553925e53 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/CompilationUnit.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/CompilationUnit.scala
@@ -25,8 +25,7 @@ object CompilationUnit {
     def parsePackagingBody(hasPackage: Boolean) = {
       while (builder.getTokenType != null) {
-        TopStatSeq
-          .parse(builder, waitBrace = false, hasPackage = hasPackage) match {
+        TopStatSeq.parse(builder, waitBrace = false, hasPackage = hasPackage) match {
           case ParserState.EMPTY_STATE =>
           case ParserState.SCRIPT_STATE =>
             Stats.trigger("scala.file.script.parsed")
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/base/ImportSelectors.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/base/ImportSelectors.scala
index 9c586c8f562..ccfc6b19eec 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/base/ImportSelectors.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/parser/parsing/base/ImportSelectors.scala
@@ -48,8 +48,7 @@ object ImportSelectors extends ParserNode {
             return true
           }
         case _ => {
-          ParserUtils
-            .parseLoopUntilRBrace(builder, () => {}) //we need to find closing brace, otherwise we can miss important things
+          ParserUtils.parseLoopUntilRBrace(builder, () => {}) //we need to find closing brace, otherwise we can miss important things
           builder.restoreNewlinesState
           importSelectorMarker.done(ScalaElementTypes.IMPORT_SELECTORS)
           return true
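The TopStatSeq.parse, parseLoopUntilRBrace and kVAL/kVAR hunks show the other direction of the same penalty: a receiver/select pair that had been split across two lines is joined back onto a single line when the whole call fits within the column limit, most visibly when the call is the scrutinee of a match. An illustrative, self-contained sketch (not code from the patch):

    object MatchScrutineeExample {
      def classify(xs: List[Int]): String =
        // Joined onto one line rather than breaking before `.span`:
        xs.span(_ < 0) match {
          case (Nil, rest) => s"no negatives, ${rest.size} remaining"
          case (negs, _)   => s"${negs.size} negatives first"
        }
    }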
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/PresentationUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/PresentationUtil.scala
index 8f36c200c5b..d7694953450 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/PresentationUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/PresentationUtil.scala
@@ -35,8 +35,9 @@ object PresentationUtil {
         buffer.append(")")
         buffer.toString()
       case param: ScParameter =>
-        ScalaDocumentationProvider
-          .parseParameter(param, presentationString(_, substitutor))
+        ScalaDocumentationProvider.parseParameter(
+          param,
+          presentationString(_, substitutor))
       case param: Parameter =>
         val builder = new StringBuilder
         builder.append(param.name)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/ScImportsHolder.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/ScImportsHolder.scala
index 81c1f40141a..c1fe26c1cb7 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/ScImportsHolder.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/ScImportsHolder.scala
@@ -207,8 +207,10 @@ trait ScImportsHolder extends ScalaPsiElement {
     documentManager.doPostponedOperationsAndUnblockDocument(document)
     val newRange =
       new TextRange(rangeMarker.getStartOffset, rangeMarker.getEndOffset)
-    optimizer
-      .replaceWithNewImportInfos(newRange, infosToAdd, settings, document)
+    optimizer.replaceWithNewImportInfos(newRange,
+                                        infosToAdd,
+                                        settings,
+                                        document)
     documentManager.commitDocument(document)
   }
@@ -229,8 +231,9 @@ trait ScImportsHolder extends ScalaPsiElement {
       else refsContainer == null && hasCodeBeforeImports
 
     if (needToInsertFirst) {
-      val dummyImport = ScalaPsiElementFactory
-        .createImportFromText("import dummy._", getManager)
+      val dummyImport = ScalaPsiElementFactory.createImportFromText(
+        "import dummy._",
+        getManager)
       val usedNames = collectUsedImportedNames(this)
       val inserted = insertFirstImport(dummyImport, getFirstChild)
         .asInstanceOf[ScImportStmt]
@@ -332,8 +335,9 @@ trait ScImportsHolder extends ScalaPsiElement {
     def shortenWhitespace(node: ASTNode) {
       if (node == null) return
       if (node.getText.count(_ == '\n') >= 2) {
-        val nl = ScalaPsiElementFactory
-          .createNewLine(getManager, node.getText.replaceFirst("[\n]", ""))
+        val nl = ScalaPsiElementFactory.createNewLine(
+          getManager,
+          node.getText.replaceFirst("[\n]", ""))
         getNode.replaceChild(node, nl.getNode)
       }
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/TypeAdjuster.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/TypeAdjuster.scala
index 4191f3a54c7..9ef839db9a4 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/TypeAdjuster.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/TypeAdjuster.scala
@@ -84,8 +84,9 @@ object TypeAdjuster extends ApplicationAdapter {
   }
 
   private def newTypeElem(name: String, position: PsiElement) =
-    ScalaPsiElementFactory
-      .createTypeElementFromText(name, position.getContext, position)
+    ScalaPsiElementFactory.createTypeElementFromText(name,
+                                                     position.getContext,
+                                                     position)
 
   private def toReplacementInfos(
       typeElements: Seq[ScTypeElement],
@@ -284,8 +285,7 @@ object TypeAdjuster extends ApplicationAdapter {
       val holder = importHolders.get(info)
       if (info.pathsToImport.nonEmpty && holder.isDefined) {
         val pathsToAdd =
-          holderToPaths
-            .getOrElseUpdate(holder.get, Set.empty) ++ info.pathsToImport
+          holderToPaths.getOrElseUpdate(holder.get, Set.empty) ++ info.pathsToImport
         holderToPaths += holder.get -> pathsToAdd
       }
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/FileDeclarationsHolder.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/FileDeclarationsHolder.scala
index ec7fe00b19e..ab8ee469346 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/FileDeclarationsHolder.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/FileDeclarationsHolder.scala
@@ -59,8 +59,10 @@ trait FileDeclarationsHolder
           .processDeclarations(processor, state, lastParent, place))
       return false
 
-    if (!super[ScImportsHolder]
-          .processDeclarations(processor, state, lastParent, place))
+    if (!super[ScImportsHolder].processDeclarations(processor,
+                                                    state,
+                                                    lastParent,
+                                                    place))
       return false
 
     if (context != null) {
@@ -259,8 +261,10 @@ trait FileDeclarationsHolder
     }
 
     if (ScalaFileImpl.isProcessLocalClasses(lastParent) &&
-        !super[ScDeclarationSequenceHolder]
-          .processDeclarations(processor, state, lastParent, place))
+        !super[ScDeclarationSequenceHolder].processDeclarations(processor,
+                                                                state,
+                                                                lastParent,
+                                                                place))
       return false
 
     true
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/InferUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/InferUtil.scala
index 936b787b625..513cb3d5bff 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/InferUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/InferUtil.scala
@@ -258,8 +258,7 @@ object InferUtil {
           val evaluator = ScalaMacroEvaluator.getInstance(place.getProject)
           evaluator.isMacro(results.head.getElement) match {
             case Some(m) =>
-              evaluator
-                .checkMacro(m, MacroContext(place, Some(paramType))) match {
+              evaluator.checkMacro(m, MacroContext(place, Some(paramType))) match {
                 case Some(tp) =>
                   exprs += new Expression(polymorphicSubst subst tp)
                 case None => updateExpr()
@@ -644,15 +643,17 @@ object InferUtil {
           if (tp.lowerType() != Nothing) {
             val substedLowerType = unSubst.subst(tp.lowerType())
             if (!hasRecursiveTypeParameters(substedLowerType)) {
-              un = un
-                .addLower(name, substedLowerType, additional = true)
+              un = un.addLower(name,
+                               substedLowerType,
+                               additional = true)
             }
           }
           if (tp.upperType() != Any) {
             val substedUpperType = unSubst.subst(tp.upperType())
             if (!hasRecursiveTypeParameters(substedUpperType)) {
-              un = un
-                .addUpper(name, substedUpperType, additional = true)
+              un = un.addUpper(name,
+                               substedUpperType,
+                               additional = true)
             }
           }
         }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/ScControlFlowOwner.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/ScControlFlowOwner.scala
index 9b057a48cd5..9c6e7980a5d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/ScControlFlowOwner.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/ScControlFlowOwner.scala
@@ -35,8 +35,9 @@ trait ScControlFlowOwner extends ScalaPsiElement {
   def getControlFlow(
       policy: ScControlFlowPolicy = AllVariablesControlFlowPolicy)
     : Seq[Instruction] = {
-    val provider = myControlFlowCache
-      .getOrElseUpdate(policy, new ControlFlowCacheProvider(policy))
+    val provider = myControlFlowCache.getOrElseUpdate(
+      policy,
+      new ControlFlowCacheProvider(policy))
     provider.compute().getValue
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/ScPrimaryConstructor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/ScPrimaryConstructor.scala
index b5e425cb5cd..2264aaaab74 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/ScPrimaryConstructor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/ScPrimaryConstructor.scala
@@ -63,8 +63,9 @@ trait ScPrimaryConstructor
   @CachedInsidePsiElement(this, ModCount.getBlockModificationCount)
   def effectiveParameterClauses: Seq[ScParameterClause] = {
     def emptyParameterList: ScParameterClause =
-      ScalaPsiElementFactory
-        .createEmptyClassParamClauseWithContext(getManager, parameterList)
+      ScalaPsiElementFactory.createEmptyClassParamClauseWithContext(
+        getManager,
+        parameterList)
     val clausesWithInitialEmpty = parameterList.clauses match {
       case Seq() => Seq(emptyParameterList)
       case Seq(clause) if clause.isImplicit => Seq(emptyParameterList, clause)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/ScReferenceElement.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/ScReferenceElement.scala
index f64a0b7c0ed..f524be2a36d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/ScReferenceElement.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/ScReferenceElement.scala
@@ -365,8 +365,9 @@ trait ScReferenceElement
     extensions.inWriteAction {
       val refText =
         if (addImport) {
-          val importHolder = ScalaImportTypeFix
-            .getImportHolder(ref = this, project = getProject)
+          val importHolder = ScalaImportTypeFix.getImportHolder(ref = this,
+                                                                project =
+                                                                  getProject)
           val imported = importHolder.getAllImportUsed.exists {
             case ImportExprUsed(expr) =>
               expr.reference.exists { ref =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/patterns/ScPattern.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/patterns/ScPattern.scala
index 6a558fc9fe1..d21417cd459 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/patterns/ScPattern.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/base/patterns/ScPattern.scala
@@ -217,8 +217,10 @@ trait ScPattern extends ScalaPsiElement {
             (p.name, ScalaPsiUtil.getPsiElementId(p)),
             ScUndefinedType(new ScTypeParameterType(p, substitutor)))
       }
-      val clazz = ScalaPsiUtil
-        .getContextOfType(this, true, classOf[ScTemplateDefinition])
+      val clazz = ScalaPsiUtil.getContextOfType(
+        this,
+        true,
+        classOf[ScTemplateDefinition])
       clazz match {
         case clazz: ScTemplateDefinition =>
           undefSubst =
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScAnnotations.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScAnnotations.scala
index 9147fd6aa87..28da898d959 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScAnnotations.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScAnnotations.scala
@@ -55,8 +55,7 @@ trait ScAnnotations extends ScalaPsiElement with PsiReferenceList {
           ScType.extractClass(tp, Some(getProject)) match {
             case Some(clazz) if clazz.qualifiedName == "java.lang.Class" =>
-              ScType
-                .extractClass(arg(0), Some(getProject)) match {
+              ScType.extractClass(arg(0), Some(getProject)) match {
                 case Some(p) =>
                   JavaPsiFacade
                     .getInstance(getProject)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScBlock.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScBlock.scala
index aa867cb3461..16763956ba0 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScBlock.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScBlock.scala
@@ -281,8 +281,10 @@ trait ScBlock
                                  processor,
                                  state,
                                  lastParent,
-                                 place) && super[ScImportsHolder]
-      .processDeclarations(processor, state, lastParent, place)
+                                 place) && super[ScImportsHolder].processDeclarations(processor,
+                                                                                      state,
+                                                                                      lastParent,
+                                                                                      place)
 
   def needCheckExpectedType = true
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScExpression.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScExpression.scala
index f8ef3efb64e..7de5be7f7a2 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScExpression.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScExpression.scala
@@ -217,8 +217,9 @@ trait ScExpression
   private def getTypeWithoutImplicitsImpl(
       ignoreBaseTypes: Boolean,
      fromUnderscore: Boolean): TypeResult[ScType] = {
-    val inner = ScExpression.this
-      .getNonValueType(TypingContext.empty, ignoreBaseTypes, fromUnderscore)
+    val inner = ScExpression.this.getNonValueType(TypingContext.empty,
+                                                  ignoreBaseTypes,
+                                                  fromUnderscore)
     inner match {
       case Success(rtp, _) =>
         var res = rtp
@@ -543,8 +544,9 @@ trait ScExpression
     }
     val newExpr: ScExpression =
       if (ScalaPsiUtil.needParentheses(this, expr)) {
-        ScalaPsiElementFactory
-          .createExpressionFromText("(" + expr.getText + ")", getManager)
+        ScalaPsiElementFactory.createExpressionFromText(
+          "(" + expr.getText + ")",
+          getManager)
       } else expr
     val parentNode = oldParent.getNode
     val newNode = newExpr.copy.getNode
@@ -575,8 +577,8 @@ trait ScExpression
                ModCount.getBlockModificationCount)
   def expectedTypesEx(fromUnderscore: Boolean = true)
     : Array[(ScType, Option[ScTypeElement])] = {
-    ExpectedTypes
-      .expectedExprTypes(ScExpression.this, fromUnderscore = fromUnderscore)
+    ExpectedTypes.expectedExprTypes(ScExpression.this,
+                                    fromUnderscore = fromUnderscore)
   }
 
   @CachedMappedWithRecursionGuard(this,
@@ -725,8 +727,9 @@ trait ScExpression
             state.put(ScImplicitlyConvertible.IMPLICIT_RESOLUTION_KEY, cl)
           case _ =>
         }
-        applyProc
-          .processType(res.getTypeWithDependentSubstitutor, expr, state)
+        applyProc.processType(res.getTypeWithDependentSubstitutor,
+                              expr,
+                              state)
         cand = applyProc.candidates
       case _ =>
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScModificationTrackerOwner.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScModificationTrackerOwner.scala
index 4fbfa57e85e..eb043580cd0 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScModificationTrackerOwner.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScModificationTrackerOwner.scala
@@ -97,8 +97,9 @@ trait ScModificationTrackerOwner
   }
 
   def createMirror(text: String): PsiElement = {
-    ScalaPsiElementFactory
-      .createExpressionWithContextFromText(text, getContext, this)
+    ScalaPsiElementFactory.createExpressionWithContextFromText(text,
+                                                               getContext,
+                                                               this)
   }
 
   @Cached(synchronized = true, ModCount.getBlockModificationCount, this)
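Calls qualified with super[Trait] get the same treatment (FileDeclarationsHolder and ScBlock above, ScNewTemplateDefinitionImpl and ScFunctionDefinitionImpl further down): super[Trait].method stays on one line and the arguments break one per line. A self-contained sketch with stand-in traits, not code from the patch:

    object SuperCallExample {
      trait A { def accept(a: Int, b: Int, c: Int): Boolean = a + b >= c }
      trait B { def accept(a: Int, b: Int, c: Int): Boolean = a <= b * c }
      class C extends A with B {
        // `super[A].accept` is kept together; the break lands in the args.
        override def accept(a: Int, b: Int, c: Int): Boolean =
          super[A].accept(a,
                          b,
                          c) &&
            super[B].accept(a, b, c)
      }
    }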
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/statements/ScFunction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/statements/ScFunction.scala
index 0e32828aa9a..f8ce659126d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/statements/ScFunction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/statements/ScFunction.scala
@@ -352,8 +352,8 @@ trait ScFunction
             fun.returnTypeInner
           case _ =>
         }
-        parent
-          .putUserData(ScFunction.calculatedBlockKey, java.lang.Boolean.TRUE)
+        parent.putUserData(ScFunction.calculatedBlockKey,
+                           java.lang.Boolean.TRUE)
         returnTypeInner
       }
     } else returnTypeInner
@@ -381,15 +381,17 @@ trait ScFunction
       case owner: ScTypeParametersOwner =>
         if (hasImplicit) None
         else
-          ScalaPsiUtil
-            .syntheticParamClause(owner, paramClauses, classParam = false)
+          ScalaPsiUtil.syntheticParamClause(owner,
+                                            paramClauses,
+                                            classParam = false)
       case _ => None
     }
   } else {
     if (hasImplicit) None
     else
-      ScalaPsiUtil
-        .syntheticParamClause(this, paramClauses, classParam = false)
+      ScalaPsiUtil.syntheticParamClause(this,
+                                        paramClauses,
+                                        classParam = false)
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/ScNamedElement.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/ScNamedElement.scala
index a8b385082c0..4c64baeb9ff 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/ScNamedElement.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/ScNamedElement.scala
@@ -21,7 +21,8 @@ import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.JavaIdentifi
 import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
 
 trait ScNamedElement
-    extends ScalaPsiElement with PsiNameIdentifierOwner
+    extends ScalaPsiElement
+    with PsiNameIdentifierOwner
     with NavigatablePsiElement {
   def name: String = {
     this match {
@@ -92,16 +93,21 @@ trait ScNamedElement
   abstract override def getUseScope: SearchScope = {
     ScalaPsiUtil.intersectScopes(
-      super.getUseScope, ScalaPsiUtil.nameContext(this) match {
-        case member: ScMember if member != this => Some(member.getUseScope)
-        case caseClause: ScCaseClause => Some(new LocalSearchScope(caseClause))
-        case elem @ (_: ScEnumerator | _: ScGenerator) =>
-          Option(
+      super.getUseScope,
+      ScalaPsiUtil.nameContext(this) match {
+        case member: ScMember if member != this => Some(member.getUseScope)
+        case caseClause: ScCaseClause => Some(new LocalSearchScope(caseClause))
+        case elem @ (_: ScEnumerator | _: ScGenerator) =>
+          Option(
             PsiTreeUtil.getContextOfType(elem, true, classOf[ScForStatement]))
-            .orElse(Option(PsiTreeUtil.getContextOfType(
-              elem, true, classOf[ScBlock], classOf[ScMember])))
-            .map(new LocalSearchScope(_))
-        case _ => None
-      })
+            .orElse(
+              Option(
+                PsiTreeUtil.getContextOfType(elem,
+                                             true,
+                                             classOf[ScBlock],
+                                             classOf[ScMember])))
+            .map(new LocalSearchScope(_))
+        case _ => None
+      })
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/typedef/ScMember.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/typedef/ScMember.scala
index 0c6ae032378..f43e9551c83 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/typedef/ScMember.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/typedef/ScMember.scala
@@ -192,8 +192,10 @@ trait ScMember
     val accessModifier = Option(getModifierList).flatMap(_.accessModifier)
 
     def fromContainingBlockOrMember(): Option[SearchScope] = {
-      val blockOrMember = PsiTreeUtil
-        .getContextOfType(this, true, classOf[ScBlock], classOf[ScMember])
+      val blockOrMember = PsiTreeUtil.getContextOfType(this,
+                                                       true,
+                                                       classOf[ScBlock],
+                                                       classOf[ScMember])
       blockOrMember match {
         case null => None
         case block: ScBlock => Some(new LocalSearchScope(block))
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/typedef/ScTypeDefinition.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/typedef/ScTypeDefinition.scala
index a53f3f74e4d..e14d11719ae 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/typedef/ScTypeDefinition.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/typedef/ScTypeDefinition.scala
@@ -120,8 +120,9 @@ trait ScTypeDefinition
             .mkString("(", ", ", ")")
         }.mkString("(", " => ", s" => $name)")
       val typeElement =
-        ScalaPsiElementFactory
-          .createTypeElementFromText(typeElementText, getManager)
+        ScalaPsiElementFactory.createTypeElementFromText(
+          typeElementText,
+          getManager)
       s" extends ${typeElement.getText}"
     } else {
       ""
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScPackageImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScPackageImpl.scala
index 494e42e1fbb..66e053e25b8 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScPackageImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScPackageImpl.scala
@@ -93,8 +93,9 @@ class ScPackageImpl private (val pack: PsiPackage)
       case _ => place.getResolveScope
     }
     if (getQualifiedName == "scala") {
-      ScPackageImpl
-        .implicitlyImportedObject(place.getManager, scope, "scala") match {
+      ScPackageImpl.implicitlyImportedObject(place.getManager,
+                                             scope,
+                                             "scala") match {
         case Some(obj: ScObject) =>
           var newState = state
          obj.getType(TypingContext.empty).foreach {
@@ -146,8 +147,8 @@ class ScPackageImpl private (val pack: PsiPackage)
     if (lastDot < 0) {
       ScPackageImpl.findPackage(getProject, "")
     } else {
-      ScPackageImpl
-        .findPackage(getProject, myQualifiedName.substring(0, lastDot))
+      ScPackageImpl.findPackage(getProject,
+                                myQualifiedName.substring(0, lastDot))
     }
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScalaPsiElementFactory.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScalaPsiElementFactory.scala
index 21be9ae3517..ecaaccb3fa8 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScalaPsiElementFactory.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScalaPsiElementFactory.scala
@@ -140,13 +140,13 @@ class ScalaPsiElementFactoryImpl(manager: PsiManager)
     throw new IncorrectOperationException
 
   def createConstructor(): PsiMethod = {
-    ScalaPsiElementFactory
-      .createMethodFromText("def this() {\nthis()\n}", manager)
+    ScalaPsiElementFactory.createMethodFromText("def this() {\nthis()\n}",
+                                                manager)
   }
 
   def createConstructor(name: String): PsiMethod = {
-    ScalaPsiElementFactory
-      .createMethodFromText("def this() {\nthis()\n}", manager)
+    ScalaPsiElementFactory.createMethodFromText("def this() {\nthis()\n}",
+                                                manager)
   }
 
   def createClassInitializer(): PsiClassInitializer =
@@ -214,8 +214,9 @@ class ScalaPsiElementFactoryImpl(manager: PsiManager)
   def createExpressionFromText(text: String,
                                context: PsiElement): PsiElement = {
     try {
-      ScalaPsiElementFactory
-        .createExpressionWithContextFromText(text, context, context)
+      ScalaPsiElementFactory.createExpressionWithContextFromText(text,
+                                                                 context,
+                                                                 context)
     } catch {
       case e: Throwable => throw new IncorrectOperationException
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScalaPsiManager.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScalaPsiManager.scala
index e850ea83e15..dd9ffd0e04d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScalaPsiManager.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/ScalaPsiManager.scala
@@ -253,8 +253,8 @@ class ScalaPsiManager(project: Project) extends ProjectComponent { self =>
       p.isInstanceOf[PsiClassWrapper]
     }
-    ArrayUtil
-      .mergeArrays(classes, SyntheticClassProducer.getAllClasses(fqn, scope))
+    ArrayUtil.mergeArrays(classes,
+                          SyntheticClassProducer.getAllClasses(fqn, scope))
   }
 
   if (DumbService.getInstance(project).isDumb) return Array.empty
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScModifierListImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScModifierListImpl.scala
index fcbf21a6b93..0b3182ca7aa 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScModifierListImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScModifierListImpl.scala
@@ -134,16 +134,16 @@ class ScModifierListImpl private (stub: StubElement[ScModifierList],
     name match {
       case "override" =>
         if (value) {
-          val node = ScalaPsiElementFactory
-            .createModifierFromText("override", getManager)
+          val node = ScalaPsiElementFactory.createModifierFromText("override",
+                                                                   getManager)
           addBefore(node)
         } else
          getNode.removeChild(
            findChildByType[PsiElement](ScalaTokenTypes.kOVERRIDE).getNode)
       case "private" =>
         if (value) {
-          val node = ScalaPsiElementFactory
-            .createModifierFromText("private", getManager)
+          val node = ScalaPsiElementFactory.createModifierFromText("private",
+                                                                   getManager)
           addBefore(node)
         } else {
           for (child <- getChildren if child.isInstanceOf[ScAccessModifier] &&
@@ -154,8 +154,8 @@ class ScModifierListImpl private (stub: StubElement[ScModifierList],
         }
       case "protected" =>
         if (value) {
-          val node = ScalaPsiElementFactory
-            .createModifierFromText("protected", getManager)
+          val node = ScalaPsiElementFactory.createModifierFromText("protected",
+                                                                   getManager)
           addBefore(node)
         } else {
           for (child <- getChildren if child.isInstanceOf[ScAccessModifier] &&
@@ -174,16 +174,16 @@ class ScModifierListImpl private (stub: StubElement[ScModifierList],
             findChildByType[PsiElement](ScalaTokenTypes.kFINAL).getNode)
       case "implicit" =>
         if (value) {
-          val node = ScalaPsiElementFactory
-            .createModifierFromText("implicit", getManager)
+          val node = ScalaPsiElementFactory.createModifierFromText("implicit",
+                                                                   getManager)
           addBefore(node)
         } else
           getNode.removeChild(
             findChildByType[PsiElement](ScalaTokenTypes.kIMPLICIT).getNode)
       case "abstract" =>
         if (value) {
-          val node = ScalaPsiElementFactory
-            .createModifierFromText("abstract", getManager)
+          val node = ScalaPsiElementFactory.createModifierFromText("abstract",
+                                                                   getManager)
           addBefore(node)
         } else
           getNode.removeChild(
@@ -248,8 +248,7 @@ class ScModifierListImpl private (stub: StubElement[ScModifierList],
     getAnnotations.find(_.getQualifiedName == name) match {
       case None if name == "java.lang.Override" =>
         val factory = JavaPsiFacade.getInstance(getProject).getElementFactory
-        factory
-          .createAnnotationFromText("@" + name, this); // hack to disable AddOverrideAnnotationAction,
+        factory.createAnnotationFromText("@" + name, this); // hack to disable AddOverrideAnnotationAction,
       case None => null
       case Some(x) => x
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScStableCodeReferenceElementImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScStableCodeReferenceElementImpl.scala
index 9c9f529eb1f..c73b745b2de 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScStableCodeReferenceElementImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/ScStableCodeReferenceElementImpl.scala
@@ -172,8 +172,9 @@ class ScStableCodeReferenceElementImpl(node: ASTNode)
           else getText
       })
     if (nameId.getText != c.name) {
-      val ref = ScalaPsiElementFactory
-        .createReferenceFromText(c.name, getManager)
+      val ref = ScalaPsiElementFactory.createReferenceFromText(
+        c.name,
+        getManager)
       return this
         .replace(ref)
         .asInstanceOf[ScStableCodeReferenceElement]
@@ -184,11 +185,14 @@ class ScStableCodeReferenceElementImpl(node: ASTNode)
         .getInstance(getProject)
         .hasImportWithPrefix(qname)
     if (qualifier.isDefined && !isPredefined) {
-      val ref = ScalaPsiElementFactory
-        .createReferenceFromText(c.name, getContext, this)
+      val ref = ScalaPsiElementFactory.createReferenceFromText(
+        c.name,
+        getContext,
+        this)
       if (ref.isReferenceTo(element)) {
-        val ref = ScalaPsiElementFactory
-          .createReferenceFromText(c.name, getManager)
+        val ref = ScalaPsiElementFactory.createReferenceFromText(
+          c.name,
+          getManager)
         return this.replace(ref)
       }
     }
@@ -238,8 +242,9 @@ class ScStableCodeReferenceElementImpl(node: ASTNode)
       if (qualifier != None) {
         //let's make our reference unqualified
         val ref: ScStableCodeReferenceElement =
-          ScalaPsiElementFactory
-            .createReferenceFromText(c.name, getManager)
+          ScalaPsiElementFactory.createReferenceFromText(
+            c.name,
+            getManager)
         this.replace(ref).asInstanceOf[ScReferenceElement]
       }
       this
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScFunctionalTypeElementImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScFunctionalTypeElementImpl.scala
index 12a9326a9aa..a21fd98a23b 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScFunctionalTypeElementImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScFunctionalTypeElementImpl.scala
@@ -37,8 +37,10 @@ class ScFunctionalTypeElementImpl(node: ASTNode)
     val newTypeText =
       s"_root_.scala.Function$n[${paramTypes.map(_.getText).mkString(",")}${if (n == 0) "" else ", "}" +
         s"${returnTypeElement.map(_.getText).getOrElse("Any")}]"
-    val newTypeElement = ScalaPsiElementFactory
-      .createTypeElementFromText(newTypeText, getContext, this)
+    val newTypeElement = ScalaPsiElementFactory.createTypeElementFromText(
+      newTypeText,
+      getContext,
+      this)
     newTypeElement match {
       case p: ScParameterizedTypeElement => Some(p)
       case _ => None
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScInfixTypeElementImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScInfixTypeElementImpl.scala
index 1b0a5294b6f..dc41b001612 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScInfixTypeElementImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScInfixTypeElementImpl.scala
@@ -34,8 +34,10 @@ class ScInfixTypeElementImpl(node: ASTNode)
   def desugarizedInfixType: Option[ScParameterizedTypeElement] = {
     val newTypeText =
       s"${ref.getText}[${lOp.getText}, ${rOp.map(_.getText).getOrElse("Nothing")}}]"
-    val newTypeElement = ScalaPsiElementFactory
-      .createTypeElementFromText(newTypeText, getContext, this)
+    val newTypeElement = ScalaPsiElementFactory.createTypeElementFromText(
+      newTypeText,
+      getContext,
+      this)
     newTypeElement match {
       case p: ScParameterizedTypeElement => Some(p)
       case _ => None
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScParameterizedTypeElementImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScParameterizedTypeElementImpl.scala
index b80768f0ecd..b79d8fe0ed6 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScParameterizedTypeElementImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScParameterizedTypeElementImpl.scala
@@ -92,8 +92,10 @@ class ScParameterizedTypeElementImpl(node: ASTNode)
         }
         val lambdaText =
           s"({type $typeName[$paramText] = ${ret.getText}})#$typeName"
-        val newTE = ScalaPsiElementFactory
-          .createTypeElementFromText(lambdaText, getContext, this)
+        val newTE = ScalaPsiElementFactory.createTypeElementFromText(
+          lambdaText,
+          getContext,
+          this)
         Option(newTE)
       case _ => None
     }
@@ -128,8 +130,9 @@ class ScParameterizedTypeElementImpl(node: ASTNode)
     val typeName = "Λ$"
     val inlineText =
       s"({type $typeName$paramText = ${typeElement.getText}$bodyText})#$typeName"
-    val newTE = ScalaPsiElementFactory
-      .createTypeElementFromText(inlineText, getContext, this)
+    val newTE = ScalaPsiElementFactory.createTypeElementFromText(inlineText,
+                                                                 getContext,
+                                                                 this)
     Option(newTE)
   }
@@ -153,8 +156,10 @@ class ScParameterizedTypeElementImpl(node: ASTNode)
     forSomeBuilder.append("}")
     val newTypeText =
       s"(${typeElement.getText}${typeElements.mkString("[", ", ", "]")} ${forSomeBuilder.toString()})"
-    val newTypeElement = ScalaPsiElementFactory
-      .createTypeElementFromText(newTypeText, getContext, this)
+    val newTypeElement = ScalaPsiElementFactory.createTypeElementFromText(
+      newTypeText,
+      getContext,
+      this)
     Option(newTypeElement)
   }
@@ -306,8 +311,9 @@ class ScParameterizedTypeElementImpl(node: ASTNode)
                                 Any),
             state)
         } else if (upperBound > 0 && lowerBound > 0) {
-          val actualText = text
-            .substring(0, math.min(lowerBound, upperBound))
+          val actualText = text.substring(
+            0,
+            math.min(lowerBound, upperBound))
           processor.execute(
             new ScSyntheticClass(getManager,
                                  actualText,
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScSimpleTypeElementImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScSimpleTypeElementImpl.scala
index 47fc0525088..43168b088b7 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScSimpleTypeElementImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScSimpleTypeElementImpl.scala
@@ -491,13 +491,14 @@ class ScSimpleTypeElementImpl(node: ASTNode)
                 typeForConstructor(ref, method, subst, r.getActualElement),
                 Some(this))
           case _ =>
-            ScSimpleTypeElementImpl
-              .calculateReferenceType(ref, shapesOnly = false)
+            ScSimpleTypeElementImpl.calculateReferenceType(ref,
+                                                           shapesOnly =
+                                                             false)
         }
       }
       case None =>
-        ScSimpleTypeElementImpl
-          .calculateReferenceType(pathElement, shapesOnly = false)
+        ScSimpleTypeElementImpl.calculateReferenceType(pathElement,
+                                                       shapesOnly = false)
     }
   }
@@ -636,8 +637,10 @@ object ScSimpleTypeElementImpl {
       case _ =>
         resolvedElement match {
           case self: ScSelfTypeElement =>
-            val td = PsiTreeUtil
-              .getContextOfType(self, true, classOf[ScTemplateDefinition])
+            val td = PsiTreeUtil.getContextOfType(
+              self,
+              true,
+              classOf[ScTemplateDefinition])
             Success(ScThisType(td), Some(ref))
           case _ =>
             if (fromType.isEmpty)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScTupleTypeElementImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScTupleTypeElementImpl.scala
index f290f1f5387..cbb7238c2de 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScTupleTypeElementImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScTupleTypeElementImpl.scala
@@ -30,8 +30,10 @@ class ScTupleTypeElementImpl(node: ASTNode)
     val n = components.length
     val newTypeText =
       s"_root_.scala.Tuple$n[${components.map(_.getText).mkString(", ")}]"
-    val newTypeElement = ScalaPsiElementFactory
-      .createTypeElementFromText(newTypeText, getContext, this)
+    val newTypeElement = ScalaPsiElementFactory.createTypeElementFromText(
+      newTypeText,
+      getContext,
+      this)
     newTypeElement match {
       case p: ScParameterizedTypeElement => Some(p)
       case _ => None
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScAssignStmtImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScAssignStmtImpl.scala
index 077d5a4dba2..f1f88f4f82c 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScAssignStmtImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScAssignStmtImpl.scala
@@ -68,8 +68,10 @@ class ScAssignStmtImpl(node: ASTNode)
         val text =
           s"${ref.refName}_=(${getRExpression.map(_.getText).getOrElse("")})"
         val mirrorExpr =
-          ScalaPsiElementFactory
-            .createExpressionWithContextFromText(text, getContext, this)
+          ScalaPsiElementFactory.createExpressionWithContextFromText(
+            text,
+            getContext,
+            this)
         mirrorExpr match {
           case call: ScMethodCall => call.getInvokedExpr
@@ -87,8 +89,10 @@ class ScAssignStmtImpl(node: ASTNode)
           s"${invokedExpr.getText}.update(${methodCall.args.exprs.map(_.getText).mkString(",")}," +
             s" ${getRExpression.map(_.getText).getOrElse("")}"
         val mirrorExpr =
-          ScalaPsiElementFactory
-            .createExpressionWithContextFromText(text, getContext, this)
+          ScalaPsiElementFactory.createExpressionWithContextFromText(
+            text,
+            getContext,
+            this)
         //todo: improve performance: do not re-evaluate resolve to "update" method
         mirrorExpr match {
           case call: ScMethodCall => Some(call)
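Named arguments follow the same one-per-line layout once a call breaks, and a `name =` can even be split from its value when it would overflow, as in the calculateReferenceType(ref, shapesOnly = false) hunks above. A minimal sketch (the render helper is hypothetical):

    object NamedArgExample {
      def render(text: String,
                 upperCase: Boolean = false,
                 trim: Boolean = true): String = {
        val t = if (trim) text.trim else text
        if (upperCase) t.toUpperCase else t
      }

      // One argument per line keeps `name = value` pairs readable
      // in long boolean-flag calls.
      val out = render("  hello  ",
                       upperCase = true,
                       trim = false)
    }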
ScalaPsiElementFactory.createConstructorBodyWithContextFromText(text, + getContext, + this) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScConstrExprImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScConstrExprImpl.scala index 101f00ffca8..c681e0c2a67 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScConstrExprImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScConstrExprImpl.scala @@ -16,8 +16,9 @@ class ScConstrExprImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScConstrExpr { override def createMirror(text: String): PsiElement = { - ScalaPsiElementFactory - .createConstructorBodyWithContextFromText(text, getContext, this) + ScalaPsiElementFactory.createConstructorBodyWithContextFromText(text, + getContext, + this) } override def toString: String = "ConstructorExpression" diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScIfStmtImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScIfStmtImpl.scala index 3f8cc5b35ff..7970e14f1ff 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScIfStmtImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScIfStmtImpl.scala @@ -50,8 +50,8 @@ class ScIfStmtImpl(node: ASTNode) getLastChild match { case expression: ScExpression => expression case _ => - PsiTreeUtil - .getPrevSiblingOfType(getLastChild, classOf[ScExpression]) + PsiTreeUtil.getPrevSiblingOfType(getLastChild, + classOf[ScExpression]) } if (t == null) None else diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScInfixExprImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScInfixExprImpl.scala index 1ae3531d1b5..76be8ae400c 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScInfixExprImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScInfixExprImpl.scala @@ -50,8 +50,10 @@ class ScInfixExprImpl(node: ASTNode) val rText = rOp.getText val exprText = s"$lText = $lText ${r.element.name} $rText" val newExpr = - ScalaPsiElementFactory - .createExpressionWithContextFromText(exprText, getContext, this) + ScalaPsiElementFactory.createExpressionWithContextFromText( + exprText, + getContext, + this) newExpr.getType(TypingContext.empty) case _ => super.innerType(ctx) } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScNewTemplateDefinitionImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScNewTemplateDefinitionImpl.scala index e238cf61feb..5fd95c3b8cc 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScNewTemplateDefinitionImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScNewTemplateDefinitionImpl.scala @@ -133,8 +133,10 @@ class ScNewTemplateDefinitionImpl private ( state: ResolveState, lastParent: PsiElement, place: PsiElement): Boolean = { - super[ScNewTemplateDefinition] - .processDeclarations(processor, state, lastParent, place) + super[ScNewTemplateDefinition].processDeclarations(processor, + state, + lastParent, + place) } override def getExtendsListTypes: Array[PsiClassType] = innerExtendsListTypes diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScReferenceExpressionImpl.scala 
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScReferenceExpressionImpl.scala index 6e14a4c7a4a..f82fba2606f 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScReferenceExpressionImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScReferenceExpressionImpl.scala @@ -329,8 +329,9 @@ class ScReferenceExpressionImpl(node: ASTNode) s.subst(fun.polymorphicType) //prevent infinite recursion for recursive pattern reference case Some(ScalaResolveResult(self: ScSelfTypeElement, _)) => - val clazz = PsiTreeUtil - .getContextOfType(self, true, classOf[ScTemplateDefinition]) + val clazz = PsiTreeUtil.getContextOfType(self, + true, + classOf[ScTemplateDefinition]) ScThisReferenceImpl.getThisTypeForTypeDefinition(clazz, this) match { case success: Success[ScType] => success.get case failure => return failure @@ -537,8 +538,10 @@ class ScReferenceExpressionImpl(node: ASTNode) } yield qualifier } } - ResolveUtils - .javaPolymorphicType(method, s, getResolveScope, returnType) + ResolveUtils.javaPolymorphicType(method, + s, + getResolveScope, + returnType) } else { ResolveUtils.javaPolymorphicType(method, s, getResolveScope) } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/search/ScalaOverridingMemberSearcher.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/search/ScalaOverridingMemberSearcher.scala index f06339059b7..2f7a40c8ee0 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/search/ScalaOverridingMemberSearcher.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/search/ScalaOverridingMemberSearcher.scala @@ -193,8 +193,9 @@ object ScalaOverridingMemberSearcher { } if (withSelfType) { - val inheritors = ScalaStubsUtil - .getSelfTypeInheritors(parentClass, parentClass.getResolveScope) + val inheritors = ScalaStubsUtil.getSelfTypeInheritors( + parentClass, + parentClass.getResolveScope) break = false for (clazz <- inheritors if !break) { break = !process(clazz) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionDefinitionImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionDefinitionImpl.scala index b25b5e47895..9d3123a1747 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionDefinitionImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionDefinitionImpl.scala @@ -46,8 +46,10 @@ class ScFunctionDefinitionImpl protected (stub: StubElement[ScFunction], lastParent: PsiElement, place: PsiElement): Boolean = { //process function's parameters for dependent method types, and process type parameters - if (!super[ScFunctionImpl] - .processDeclarations(processor, state, lastParent, place)) + if (!super[ScFunctionImpl].processDeclarations(processor, + state, + lastParent, + place)) return false //do not process parameters for default parameters, only for function body diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionImpl.scala index 1689f43b9ee..49821e4c7e3 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionImpl.scala +++ 
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScFunctionImpl.scala @@ -54,8 +54,10 @@ abstract class ScFunctionImpl protected (stub: StubElement[ScFunction], lastParent: PsiElement, place: PsiElement): Boolean = { // process function's process type parameters - if (!super[ScTypeParametersOwner] - .processDeclarations(processor, state, lastParent, place)) + if (!super[ScTypeParametersOwner].processDeclarations(processor, + state, + lastParent, + place)) return false lazy val parameterIncludingSynthetic: Seq[ScParameter] = diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScMacroDefinitionImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScMacroDefinitionImpl.scala index e1f3caf1242..cfc4cde0567 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScMacroDefinitionImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScMacroDefinitionImpl.scala @@ -41,8 +41,10 @@ class ScMacroDefinitionImpl private (stub: StubElement[ScFunction], lastParent: PsiElement, place: PsiElement): Boolean = { //process function's parameters for dependent method types, and process type parameters - if (!super[ScFunctionImpl] - .processDeclarations(processor, state, lastParent, place)) + if (!super[ScFunctionImpl].processDeclarations(processor, + state, + lastParent, + place)) return false //do not process parameters for default parameters, only for function body diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/imports/ScImportSelectorImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/imports/ScImportSelectorImpl.scala index 713b78515f6..ca459194ffb 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/imports/ScImportSelectorImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/imports/ScImportSelectorImpl.scala @@ -80,8 +80,9 @@ class ScImportSelectorImpl private (stub: StubElement[ScImportSelector], case Seq(sel: ScImportSelector) if !sel.isAliasedImport => val withoutBracesText = expr.qualifier.getText + "." 
+ sel.reference.getText - val newImportExpr = ScalaPsiElementFactory - .createImportExprFromText(withoutBracesText, expr.getManager) + val newImportExpr = ScalaPsiElementFactory.createImportExprFromText( + withoutBracesText, + expr.getManager) expr.replace(newImportExpr) case _ => } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/imports/ScImportStmtImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/imports/ScImportStmtImpl.scala index 04779193a20..838d0fd93ff 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/imports/ScImportStmtImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/imports/ScImportStmtImpl.scala @@ -142,8 +142,8 @@ class ScImportStmtImpl private (stub: StubElement[ScImportStmt], } val exprQualRefType = () => - ScSimpleTypeElementImpl - .calculateReferenceType(exprQual, shapesOnly = false) + ScSimpleTypeElementImpl.calculateReferenceType(exprQual, + shapesOnly = false) def checkResolve(resolve: ResolveResult): Boolean = { resolve match { diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/packaging/ScPackagingImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/packaging/ScPackagingImpl.scala index 1060c1a15e9..d704408dcf3 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/packaging/ScPackagingImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/packaging/ScPackagingImpl.scala @@ -170,13 +170,17 @@ class ScPackagingImpl private (stub: StubElement[ScPackageContainer], } if (lastParent != null && lastParent.getContext == this) { - if (!super[ScImportsHolder] - .processDeclarations(processor, state, lastParent, place)) + if (!super[ScImportsHolder].processDeclarations(processor, + state, + lastParent, + place)) return false if (ScalaFileImpl.isProcessLocalClasses(lastParent) && - !super[ScDeclarationSequenceHolder] - .processDeclarations(processor, state, lastParent, place)) + !super[ScDeclarationSequenceHolder].processDeclarations(processor, + state, + lastParent, + place)) return false } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/templates/ScExtendsBlockImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/templates/ScExtendsBlockImpl.scala index db2a175d07e..a5c0df25fc6 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/templates/ScExtendsBlockImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/templates/ScExtendsBlockImpl.scala @@ -233,9 +233,9 @@ class ScExtendsBlockImpl private (stub: StubElement[ScExtendsBlock], addClass(t) } case _ => - ScTemplateParents - .extractSupers(syntheticTypeElements, getProject) foreach { t => - addClass(t) + ScTemplateParents.extractSupers(syntheticTypeElements, getProject) foreach { + t => + addClass(t) } } if (isUnderCaseClass) { diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScClassImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScClassImpl.scala index 06af3e64835..efa2d2672b3 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScClassImpl.scala +++ 
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScClassImpl.scala @@ -129,16 +129,20 @@ class ScClassImpl private (stub: StubElement[ScTemplateDefinition], } } - super[ScTypeParametersOwner] - .processDeclarations(processor, state, lastParent, place) + super[ScTypeParametersOwner].processDeclarations(processor, + state, + lastParent, + place) } override def processDeclarations(processor: PsiScopeProcessor, state: ResolveState, lastParent: PsiElement, place: PsiElement): Boolean = { - super[ScTemplateDefinition] - .processDeclarations(processor, state, lastParent, place) + super[ScTemplateDefinition].processDeclarations(processor, + state, + lastParent, + place) } override def isCase: Boolean = hasModifierProperty("case") @@ -244,8 +248,10 @@ class ScClassImpl private (stub: StubElement[ScTemplateDefinition], !hasCopy && !x.parameterList.clauses.exists(_.hasRepeatedParam) if (addCopy) { try { - val method = ScalaPsiElementFactory - .createMethodWithContext(copyMethodText, this, this) + val method = ScalaPsiElementFactory.createMethodWithContext( + copyMethodText, + this, + this) method.setSynthetic(this) buf += method } catch { diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScObjectImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScObjectImpl.scala index 76dd28e11a7..e3e3202f84c 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScObjectImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScObjectImpl.scala @@ -156,8 +156,10 @@ class ScObjectImpl protected (stub: StubElement[ScTemplateDefinition], import org.jetbrains.plugins.scala.lang.psi.impl.ScPackageImpl._ startPackageObjectProcessing() try { - super[ScTemplateDefinition] - .processDeclarations(processor, state, lastParent, place) + super[ScTemplateDefinition].processDeclarations(processor, + state, + lastParent, + place) } catch { case ignore: DoNotProcessPackageObjectException => true //do nothing, just let's move on @@ -165,8 +167,10 @@ class ScObjectImpl protected (stub: StubElement[ScTemplateDefinition], stopPackageObjectProcessing() } } else { - super[ScTemplateDefinition] - .processDeclarations(processor, state, lastParent, place) + super[ScTemplateDefinition].processDeclarations(processor, + state, + lastParent, + place) } } @@ -179,8 +183,10 @@ class ScObjectImpl protected (stub: StubElement[ScTemplateDefinition], val res = new ArrayBuffer[PsiMethod] c.getSyntheticMethodsText.foreach(s => { try { - val method = ScalaPsiElementFactory - .createMethodWithContext(s, c.getContext, c) + val method = + ScalaPsiElementFactory.createMethodWithContext(s, + c.getContext, + c) method.setSynthetic(this) method.syntheticCaseClass = Some(c) res += method @@ -319,8 +325,8 @@ class ScObjectImpl protected (stub: StubElement[ScTemplateDefinition], TypeDefinitionMembers.TypeNodes.Map] = new mutable.WeakHashMap[Project, TypeDefinitionMembers.TypeNodes.Map] def getHardTypes: TypeDefinitionMembers.TypeNodes.Map = { - hardTypes - .getOrElseUpdate(getProject, TypeDefinitionMembers.TypeNodes.build(this)) + hardTypes.getOrElseUpdate(getProject, + TypeDefinitionMembers.TypeNodes.build(this)) } private val hardSignatures: mutable.WeakHashMap[ diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScTraitImpl.scala 
b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScTraitImpl.scala index b9f0a70af8f..22513c44680 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScTraitImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/ScTraitImpl.scala @@ -67,16 +67,21 @@ class ScTraitImpl private (stub: StubElement[ScTemplateDefinition], processor, state, lastParent, - place) && super[ScTemplateDefinition] - .processDeclarationsForTemplateBody(processor, state, lastParent, place) + place) && super[ScTemplateDefinition].processDeclarationsForTemplateBody( + processor, + state, + lastParent, + place) } override def processDeclarations(processor: PsiScopeProcessor, state: ResolveState, lastParent: PsiElement, place: PsiElement): Boolean = { - super[ScTemplateDefinition] - .processDeclarations(processor, state, lastParent, place) + super[ScTemplateDefinition].processDeclarations(processor, + state, + lastParent, + place) } override def isInterface: Boolean = true diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/SyntheticMembersInjector.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/SyntheticMembersInjector.scala index 316e727db43..c62578a83e1 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/SyntheticMembersInjector.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/SyntheticMembersInjector.scala @@ -105,8 +105,9 @@ object SyntheticMembersInjector { ScalaPsiUtil.getCompanionModule(o).getOrElse(source) case _ => source } - val function = ScalaPsiElementFactory - .createMethodWithContext(template, context, source) + val function = ScalaPsiElementFactory.createMethodWithContext(template, + context, + source) function.setSynthetic(context) function.syntheticContainingClass = Some(source) if (withOverride ^ !function.hasModifierProperty("override")) @@ -132,8 +133,9 @@ object SyntheticMembersInjector { ScalaPsiUtil.getCompanionModule(o).getOrElse(source) case _ => source }).extendsBlock - val td = ScalaPsiElementFactory - .createTypeDefinitionWithContext(template, context, source) + val td = ScalaPsiElementFactory.createTypeDefinitionWithContext(template, + context, + source) td.syntheticContainingClass = Some(source) def updateSynthetic(element: ScMember): Unit = { element match { @@ -172,8 +174,9 @@ object SyntheticMembersInjector { ScalaPsiUtil.getCompanionModule(o).getOrElse(source) case _ => source } - buffer += ScalaPsiElementFactory - .createTypeElementFromText(supers, context, source) + buffer += ScalaPsiElementFactory.createTypeElementFromText(supers, + context, + source) } catch { case p: ProcessCanceledException => throw p case e: Throwable => diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/TypeDefinitionMembers.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/TypeDefinitionMembers.scala index d730d067511..d6bedb82443 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/TypeDefinitionMembers.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/TypeDefinitionMembers.scala @@ -995,8 +995,10 @@ object TypeDefinitionMembers { if processValsForScala && !p.isVar && !p.isVal && (checkName(p.name) || checkNameGetSetIs(p.name)) && 
isScalaProcessor => - val clazz = PsiTreeUtil - .getContextOfType(p, true, classOf[ScTemplateDefinition]) + val clazz = PsiTreeUtil.getContextOfType( + p, + true, + classOf[ScTemplateDefinition]) if (clazz != null && clazz.isInstanceOf[ScClass] && !p.isEffectiveVal) { //this is member only for class scope @@ -1115,8 +1117,8 @@ object TypeDefinitionMembers { val (_, n) = iterator.next() def addMethod(method: PsiNamedElement): Boolean = { val substitutor = n.substitutor followed subst - processor - .execute(method, state.put(ScSubstitutor.key, substitutor)) + processor.execute(method, + state.put(ScSubstitutor.key, substitutor)) } n.info match { diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/implicits/ImplicitCollector.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/implicits/ImplicitCollector.scala index eb20b1cd54d..24e0557f6c7 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/implicits/ImplicitCollector.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/implicits/ImplicitCollector.scala @@ -257,8 +257,10 @@ class ImplicitCollector( getImports(state), implicitSearchState = Some(collectorState))) case f: ScFieldId => - val memb = ScalaPsiUtil - .getContextOfType(f, true, classOf[ScValue], classOf[ScVariable]) + val memb = ScalaPsiUtil.getContextOfType(f, + true, + classOf[ScValue], + classOf[ScVariable]) memb match { case memb: ScMember if memb.hasModifierProperty("implicit") => placeCalculated = true diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/implicits/ScImplicitlyConvertible.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/implicits/ScImplicitlyConvertible.scala index 9dbbe2a75a5..066516b9fac 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/implicits/ScImplicitlyConvertible.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/implicits/ScImplicitlyConvertible.scala @@ -185,8 +185,9 @@ class ScImplicitlyConvertible(place: PsiElement, case Some(substitutor) => exp match { case Some(expected) => - val additionalUSubst = Conformance - .undefinedSubst(expected, newSubst.subst(retTp)) + val additionalUSubst = + Conformance.undefinedSubst(expected, + newSubst.subst(retTp)) (uSubst + additionalUSubst).getSubstitutor match { case Some(innerSubst) => result += @@ -493,8 +494,8 @@ class ScImplicitlyConvertible(place: PsiElement, }) //todo: pass implicit parameters - ScalaPsiUtil - .debug(s"Implicit $r is ok for type $typez", LOG) + ScalaPsiUtil.debug(s"Implicit $r is ok for type $typez", + LOG) ImplicitMapResult(condition = true, r, tp, diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/LightScalaMethod.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/LightScalaMethod.scala index 5d112a6af4c..75f148600d0 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/LightScalaMethod.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/LightScalaMethod.scala @@ -43,8 +43,9 @@ class LightMethodAdapter(manager: PsiManager, override def findSuperMethodSignaturesIncludingStatic( checkAccess: Boolean): util.List[MethodSignatureBackedByPsiMethod] = - PsiSuperMethodImplUtil - .findSuperMethodSignaturesIncludingStatic(this, checkAccess) + PsiSuperMethodImplUtil.findSuperMethodSignaturesIncludingStatic( + this, + checkAccess) override def getHierarchicalMethodSignature: HierarchicalMethodSignature = 
PsiSuperMethodImplUtil.getHierarchicalMethodSignature(this) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/PsiClassWrapper.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/PsiClassWrapper.scala index 13e3514ea86..e3e9cb59f71 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/PsiClassWrapper.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/PsiClassWrapper.scala @@ -196,8 +196,9 @@ class PsiClassWrapper(val definition: ScTemplateDefinition, def findMethodsAndTheirSubstitutorsByName( name: String, checkBases: Boolean): util.List[Pair[PsiMethod, PsiSubstitutor]] = { - PsiClassImplUtil - .findMethodsAndTheirSubstitutorsByName(this, name, checkBases) + PsiClassImplUtil.findMethodsAndTheirSubstitutorsByName(this, + name, + checkBases) } def getAllMethodsAndTheirSubstitutors: util.List[ diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/PsiTypedDefinitionWrapper.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/PsiTypedDefinitionWrapper.scala index 7ea0e22fc26..8b071c3fcce 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/PsiTypedDefinitionWrapper.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/PsiTypedDefinitionWrapper.scala @@ -55,8 +55,10 @@ class PsiTypedDefinitionWrapper( } result } - val methodText = PsiTypedDefinitionWrapper - .methodText(typedDefinition, isStatic, isInterface, role) + val methodText = PsiTypedDefinitionWrapper.methodText(typedDefinition, + isStatic, + isInterface, + role) val method: PsiMethod = { try { elementFactory.createMethodFromText(methodText, containingClass) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/ScFunctionWrapper.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/ScFunctionWrapper.scala index a7e09593e86..c01b4b5db90 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/ScFunctionWrapper.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/ScFunctionWrapper.scala @@ -200,15 +200,17 @@ with LightScalaMethod { generifySubst subst ScFunctionWrapper .getSubstitutor(cClass, function) .subst(param.getType(TypingContext.empty).getOrAny) - returnType = ScType - .toPsi(scalaType, function.getProject, function.getResolveScope) + returnType = ScType.toPsi(scalaType, + function.getProject, + function.getResolveScope) case None => val scalaType = generifySubst subst ScFunctionWrapper .getSubstitutor(cClass, function) .subst(function.returnType.getOrAny) - returnType = ScType - .toPsi(scalaType, function.getProject, function.getResolveScope) + returnType = ScType.toPsi(scalaType, + function.getProject, + function.getResolveScope) } } returnType diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/StaticPsiTypedDefinitionWrapper.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/StaticPsiTypedDefinitionWrapper.scala index c90c5398b49..08cc0b75bbb 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/StaticPsiTypedDefinitionWrapper.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/StaticPsiTypedDefinitionWrapper.scala @@ -25,8 +25,9 @@ class StaticPsiTypedDefinitionWrapper( extends { val elementFactory = JavaPsiFacade.getInstance(typedDefinition.getProject).getElementFactory - val methodText = 
StaticPsiTypedDefinitionWrapper - .methodText(typedDefinition, role, containingClass) + val methodText = StaticPsiTypedDefinitionWrapper.methodText(typedDefinition, + role, + containingClass) val method: PsiMethod = { try { elementFactory.createMethodFromText(methodText, containingClass) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/StaticTraitScFunctionWrapper.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/StaticTraitScFunctionWrapper.scala index b150f40a360..4d27822a477 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/StaticTraitScFunctionWrapper.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/light/StaticTraitScFunctionWrapper.scala @@ -16,8 +16,9 @@ class StaticTraitScFunctionWrapper(val function: ScFunction, extends { val elementFactory = JavaPsiFacade.getInstance(function.getProject).getElementFactory - val methodText = StaticTraitScFunctionWrapper - .methodText(function, containingClass: PsiClassWrapper) + val methodText = StaticTraitScFunctionWrapper.methodText( + function, + containingClass: PsiClassWrapper) val method: PsiMethod = { try { elementFactory.createMethodFromText(methodText, containingClass) @@ -76,8 +77,10 @@ object StaticTraitScFunctionWrapper { case Success(tp, _) => if (param.isCallByNameParameter) builder.append("scala.Function0<") - builder.append(JavaConversionUtil - .typeText(tp, function.getProject, function.getResolveScope)) + builder.append( + JavaConversionUtil.typeText(tp, + function.getProject, + function.getResolveScope)) if (param.isCallByNameParameter) builder.append(">") case _ => builder.append("java.lang.Object") } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScAnnotationStubImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScAnnotationStubImpl.scala index 77b6d427875..b65fd96159d 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScAnnotationStubImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScAnnotationStubImpl.scala @@ -36,8 +36,10 @@ class ScAnnotationStubImpl[ParentPsi <: PsiElement]( if (typeElement != null && (typeElement.getContext eq getPsi)) return typeElement } - val res: ScTypeElement = ScalaPsiElementFactory - .createTypeElementFromText(getTypeText, getPsi, null) + val res: ScTypeElement = ScalaPsiElementFactory.createTypeElementFromText( + getTypeText, + getPsi, + null) myTypeElement = new SofterReference[ScTypeElement](res) res } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScTypeAliasStubImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScTypeAliasStubImpl.scala index 9ce8a549c6d..8d14341feb3 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScTypeAliasStubImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScTypeAliasStubImpl.scala @@ -69,8 +69,10 @@ class ScTypeAliasStubImpl[ParentPsi <: PsiElement]( return typeElement } if (getTypeElementText == "") return null - val res: ScTypeElement = ScalaPsiElementFactory - .createTypeElementFromText(getTypeElementText, getPsi, null) + val res: ScTypeElement = ScalaPsiElementFactory.createTypeElementFromText( + getTypeElementText, + getPsi, + null) myTypeElement = new SofterReference[ScTypeElement](res) res } @@ -84,8 +86,10 @@ class ScTypeAliasStubImpl[ParentPsi <: 
PsiElement]( return upperTypeElement } if (getUpperBoundElementText == "") return null - val res: ScTypeElement = ScalaPsiElementFactory - .createTypeElementFromText(getUpperBoundElementText, getPsi, null) + val res: ScTypeElement = ScalaPsiElementFactory.createTypeElementFromText( + getUpperBoundElementText, + getPsi, + null) myUpperTypeElement = new SofterReference[ScTypeElement](res) res } @@ -99,8 +103,10 @@ class ScTypeAliasStubImpl[ParentPsi <: PsiElement]( return lowerTypeElement } if (getLowerBoundElementText == "") return null - val res: ScTypeElement = ScalaPsiElementFactory - .createTypeElementFromText(getLowerBoundElementText, getPsi, null) + val res: ScTypeElement = ScalaPsiElementFactory.createTypeElementFromText( + getLowerBoundElementText, + getPsi, + null) myLowerTypeElement = new SofterReference[ScTypeElement](res) res } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/Compatibility.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/Compatibility.scala index a7ba6ba3c79..f5d305019f2 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/Compatibility.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/Compatibility.scala @@ -361,8 +361,9 @@ object Compatibility { } else { matched ::= (param, expr) matchedTypes ::= (param, exprType) - undefSubst += Conformance - .undefinedSubst(paramType, exprType, checkWeak = true) + undefSubst += Conformance.undefinedSubst(paramType, + exprType, + checkWeak = true) } } case _ => diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/Conformance.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/Conformance.scala index b0727b62719..70b924358d7 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/Conformance.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/Conformance.scala @@ -119,15 +119,23 @@ object Conformance { case _ => argsPair match { case (u: ScUndefinedType, rt) => - undefinedSubst = undefinedSubst - .addLower((u.tpt.name, u.tpt.getId), rt, variance = 0) - undefinedSubst = undefinedSubst - .addUpper((u.tpt.name, u.tpt.getId), rt, variance = 0) + undefinedSubst = undefinedSubst.addLower( + (u.tpt.name, u.tpt.getId), + rt, + variance = 0) + undefinedSubst = undefinedSubst.addUpper( + (u.tpt.name, u.tpt.getId), + rt, + variance = 0) case (lt, u: ScUndefinedType) => - undefinedSubst = undefinedSubst - .addLower((u.tpt.name, u.tpt.getId), lt, variance = 0) - undefinedSubst = undefinedSubst - .addUpper((u.tpt.name, u.tpt.getId), lt, variance = 0) + undefinedSubst = undefinedSubst.addLower( + (u.tpt.name, u.tpt.getId), + lt, + variance = 0) + undefinedSubst = undefinedSubst.addUpper( + (u.tpt.name, u.tpt.getId), + lt, + variance = 0) case (ScAbstractType(tpt, lower, upper), r) => val (right, alternateRight) = if (tpt.args.length > 0 && @@ -864,15 +872,23 @@ object Conformance { undefinedSubst = t._2 } case (u: ScUndefinedType, rt) => - undefinedSubst = undefinedSubst - .addLower((u.tpt.name, u.tpt.getId), rt, variance = 0) - undefinedSubst = undefinedSubst - .addUpper((u.tpt.name, u.tpt.getId), rt, variance = 0) + undefinedSubst = undefinedSubst.addLower( + (u.tpt.name, u.tpt.getId), + rt, + variance = 0) + undefinedSubst = undefinedSubst.addUpper( + (u.tpt.name, u.tpt.getId), + rt, + variance = 0) case (lt, u: ScUndefinedType) => - undefinedSubst = undefinedSubst - .addLower((u.tpt.name, u.tpt.getId), lt, variance = 0) 
-                undefinedSubst = undefinedSubst
-                  .addUpper((u.tpt.name, u.tpt.getId), lt, variance = 0)
+                undefinedSubst = undefinedSubst.addLower(
+                  (u.tpt.name, u.tpt.getId),
+                  lt,
+                  variance = 0)
+                undefinedSubst = undefinedSubst.addUpper(
+                  (u.tpt.name, u.tpt.getId),
+                  lt,
+                  variance = 0)
               case (ScAbstractType(tpt, lower, upper), r) =>
                 val (right, alternateRight) =
                   if (tpt.args.length > 0 &&
@@ -864,15 +872,23 @@ object Conformance {
             undefinedSubst = t._2
           }
         case (u: ScUndefinedType, rt) =>
-          undefinedSubst = undefinedSubst
-            .addLower((u.tpt.name, u.tpt.getId), rt, variance = 0)
-          undefinedSubst = undefinedSubst
-            .addUpper((u.tpt.name, u.tpt.getId), rt, variance = 0)
+          undefinedSubst = undefinedSubst.addLower(
+            (u.tpt.name, u.tpt.getId),
+            rt,
+            variance = 0)
+          undefinedSubst = undefinedSubst.addUpper(
+            (u.tpt.name, u.tpt.getId),
+            rt,
+            variance = 0)
         case (lt, u: ScUndefinedType) =>
-          undefinedSubst = undefinedSubst
-            .addLower((u.tpt.name, u.tpt.getId), lt, variance = 0)
-          undefinedSubst = undefinedSubst
-            .addUpper((u.tpt.name, u.tpt.getId), lt, variance = 0)
+          undefinedSubst = undefinedSubst.addLower(
+            (u.tpt.name, u.tpt.getId),
+            lt,
+            variance = 0)
+          undefinedSubst = undefinedSubst.addUpper(
+            (u.tpt.name, u.tpt.getId),
+            lt,
+            variance = 0)
         case (tp, _)
             if tp.isAliasType != None &&
              tp.isAliasType.get.ta.isExistentialTypeAlias =>
@@ -969,15 +985,23 @@ object Conformance {
             undefinedSubst = t._2
           }
         case (u: ScUndefinedType, rt) =>
-          undefinedSubst = undefinedSubst
-            .addLower((u.tpt.name, u.tpt.getId), rt, variance = 0)
-          undefinedSubst = undefinedSubst
-            .addUpper((u.tpt.name, u.tpt.getId), rt, variance = 0)
+          undefinedSubst = undefinedSubst.addLower(
+            (u.tpt.name, u.tpt.getId),
+            rt,
+            variance = 0)
+          undefinedSubst = undefinedSubst.addUpper(
+            (u.tpt.name, u.tpt.getId),
+            rt,
+            variance = 0)
        case (lt, u: ScUndefinedType) =>
-          undefinedSubst = undefinedSubst
-            .addLower((u.tpt.name, u.tpt.getId), lt, variance = 0)
-          undefinedSubst = undefinedSubst
-            .addUpper((u.tpt.name, u.tpt.getId), lt, variance = 0)
+          undefinedSubst = undefinedSubst.addLower(
+            (u.tpt.name, u.tpt.getId),
+            lt,
+            variance = 0)
+          undefinedSubst = undefinedSubst.addUpper(
+            (u.tpt.name, u.tpt.getId),
+            lt,
+            variance = 0)
        case (tp, _)
            if tp.isAliasType != None &&
              tp.isAliasType.get.ta.isExistentialTypeAlias =>
@@ -1250,15 +1274,23 @@ object Conformance {
            undefinedSubst = t._2
          }
        case (u: ScUndefinedType, rt) =>
-          undefinedSubst = undefinedSubst
-            .addLower((u.tpt.name, u.tpt.getId), rt, variance = 0)
-          undefinedSubst = undefinedSubst
-            .addUpper((u.tpt.name, u.tpt.getId), rt, variance = 0)
+          undefinedSubst = undefinedSubst.addLower(
+            (u.tpt.name, u.tpt.getId),
+            rt,
+            variance = 0)
+          undefinedSubst = undefinedSubst.addUpper(
+            (u.tpt.name, u.tpt.getId),
+            rt,
+            variance = 0)
        case (lt, u: ScUndefinedType) =>
-          undefinedSubst = undefinedSubst
-            .addLower((u.tpt.name, u.tpt.getId), lt, variance = 0)
-          undefinedSubst = undefinedSubst
-            .addUpper((u.tpt.name, u.tpt.getId), lt, variance = 0)
+          undefinedSubst = undefinedSubst.addLower(
+            (u.tpt.name, u.tpt.getId),
+            lt,
+            variance = 0)
+          undefinedSubst = undefinedSubst.addUpper(
+            (u.tpt.name, u.tpt.getId),
+            lt,
+            variance = 0)
        case (tp, _)
            if tp.isAliasType.isDefined &&
              tp.isAliasType.get.ta.isExistentialTypeAlias =>
@@ -1340,8 +1372,9 @@ object Conformance {
            return
          }
        }
-        undefinedSubst = undefinedSubst
-          .addUpper((owner2.tpt.name, owner2.tpt.getId), anotherType)
+        undefinedSubst = undefinedSubst.addUpper(
+          (owner2.tpt.name, owner2.tpt.getId),
+          anotherType)
        result = checkParameterizedType(
          owner2.tpt.args.map(_.param).iterator,
          args1replace,
@@ -1378,8 +1411,9 @@ object Conformance {
            return
          }
        }
-        undefinedSubst = undefinedSubst
-          .addLower((owner1.tpt.name, owner1.tpt.getId), anotherType)
+        undefinedSubst = undefinedSubst.addLower(
+          (owner1.tpt.name, owner1.tpt.getId),
+          anotherType)
        result = checkParameterizedType(
          owner1.tpt.args.map(_.param).iterator,
          args1,
@@ -1416,8 +1450,9 @@ object Conformance {
            return
          }
        }
-        undefinedSubst = undefinedSubst
-          .addUpper((owner2.tpt.name, owner2.tpt.getId), anotherType)
+        undefinedSubst = undefinedSubst.addUpper(
+          (owner2.tpt.name, owner2.tpt.getId),
+          anotherType)
        result = checkParameterizedType(
          owner2.tpt.args.map(_.param).iterator,
          args1,
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScExistentialType.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScExistentialType.scala
index a2a0b1dec4e..3664603126a 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScExistentialType.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScExistentialType.scala
@@ -142,8 +142,9 @@ case class ScExistentialType(quantified: ScType,
       case (_, _, newData) =>
         try {
           ScExistentialType(
-            quantified
-              .recursiveVarianceUpdateModifiable(newData, update, variance),
+            quantified.recursiveVarianceUpdateModifiable(newData,
+                                                         update,
+                                                         variance),
             wildcards.map(
               _.recursiveVarianceUpdateModifiable(newData, update, variance)))
         } catch {
@@ -213,8 +214,10 @@ case class ScExistentialType(quantified: ScType,
       case ex: ScExistentialType =>
         val simplified = ex.simplify()
         if (ex != simplified)
-          return Equivalence
-            .equivInner(this, simplified, undefinedSubst, falseUndef)
+          return Equivalence.equivInner(this,
+                                        simplified,
+                                        undefinedSubst,
+                                        falseUndef)
         val list = wildcards.zip(ex.wildcards)
         val iterator = list.iterator
         while (iterator.hasNext) {
@@ -223,8 +226,7 @@ case class ScExistentialType(quantified: ScType,
           if (!t._1) return (false, undefinedSubst)
           undefinedSubst = t._2
         }
-        Equivalence
-          .equivInner(skolem, ex.skolem, undefinedSubst, falseUndef) //todo: probable problems with different positions of skolemized types.
+        Equivalence.equivInner(skolem, ex.skolem, undefinedSubst, falseUndef) //todo: probable problems with different positions of skolemized types.
       case _ => (false, undefinedSubst)
     }
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScFunctionType.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScFunctionType.scala
index ecc01bd0c72..abc8dda82c0 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScFunctionType.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScFunctionType.scala
@@ -72,8 +72,7 @@ object ScPartialFunctionType {
   }

   def unapply(tp: ScType): Option[(ScType, ScType)] = {
-    ScSynteticSugarClassesUtil
-      .extractForPrefix(tp, "scala.PartialFunction") match {
+    ScSynteticSugarClassesUtil.extractForPrefix(tp, "scala.PartialFunction") match {
       case Some((clazz, typeArgs)) if typeArgs.length == 2 =>
         Some(typeArgs(1), typeArgs(0))
       case _ => None
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScParameterizedType.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScParameterizedType.scala
index ae1480f0d69..3cec4614823 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScParameterizedType.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScParameterizedType.scala
@@ -230,8 +230,9 @@ class ScParameterizedType private (val designator: ScType,
           case _ => Seq.empty
         }
         ScParameterizedType(
-          designator
-            .recursiveVarianceUpdateModifiable(newData, update, variance),
+          designator.recursiveVarianceUpdateModifiable(newData,
+                                                       update,
+                                                       variance),
           typeArgs.zipWithIndex.map {
             case (ta, i) =>
               val v = if (i < des.length) des(i) else 0
@@ -284,8 +285,10 @@ class ScParameterizedType private (val designator: ScType,
       }
       case (ScParameterizedType(_, _),
             ScParameterizedType(designator1, typeArgs1)) =>
-        var t = Equivalence
-          .equivInner(designator, designator1, undefinedSubst, falseUndef)
+        var t = Equivalence.equivInner(designator,
+                                       designator1,
+                                       undefinedSubst,
+                                       falseUndef)
         if (!t._1) return (false, undefinedSubst)
         undefinedSubst = t._2
         if (typeArgs.length != typeArgs1.length) return (false, undefinedSubst)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScProjectionType.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScProjectionType.scala
index bdf1777cc65..1104d37b9a0 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScProjectionType.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScProjectionType.scala
@@ -348,8 +348,10 @@ class ScProjectionType private (
                 new ScSubstitutor(Map.empty, Map.empty, Some(projected)) followed actualSubst
               t.getType(TypingContext.empty) match {
                 case Success(tp, _) if ScType.isSingletonType(tp) =>
-                  return Equivalence
-                    .equivInner(s.subst(tp), r, uSubst, falseUndef)
+                  return Equivalence.equivInner(s.subst(tp),
+                                                r,
+                                                uSubst,
+                                                falseUndef)
                 case _ =>
               }
             case _ =>
@@ -361,8 +363,10 @@ class ScProjectionType private (
                 new ScSubstitutor(Map.empty, Map.empty, Some(p1)) followed proj2.actualSubst
               t.getType(TypingContext.empty) match {
                 case Success(tp, _) if ScType.isSingletonType(tp) =>
-                  return Equivalence
-                    .equivInner(s.subst(tp), this, uSubst, falseUndef)
+                  return Equivalence.equivInner(s.subst(tp),
+                                                this,
+                                                uSubst,
+                                                falseUndef)
                 case _ =>
               }
             case _ =>
@@ -378,8 +382,10 @@ class ScProjectionType private (
             case Success(singl, _) if ScType.isSingletonType(singl) =>
               val newSubst = actualSubst.followed(
                 new ScSubstitutor(Map.empty, Map.empty, Some(projected)))
-              Equivalence
-                .equivInner(r, newSubst.subst(singl), uSubst, falseUndef)
+              Equivalence.equivInner(r,
+                                     newSubst.subst(singl),
+                                     uSubst,
+                                     falseUndef)
             case _ => (false, uSubst)
           }
         case _ => (false, uSubst)
@@ -453,8 +459,10 @@ case class ScThisType(clazz: ScTemplateDefinition) extends ValueType {
           case Success(singl, _) if ScType.isSingletonType(singl) =>
             val newSubst = p.actualSubst.followed(
               new ScSubstitutor(Map.empty, Map.empty, Some(tp)))
-            Equivalence
-              .equivInner(this, newSubst.subst(singl), uSubst, falseUndef)
+            Equivalence.equivInner(this,
+                                   newSubst.subst(singl),
+                                   uSubst,
+                                   falseUndef)
           case _ => (false, uSubst)
         }
       case _ => (false, uSubst)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePresentation.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePresentation.scala
index 23b28837e59..08e7712e2e2 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePresentation.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePresentation.scala
@@ -339,8 +339,9 @@ trait ScTypePresentation {
       case ScExistentialType(q, wilds) =>
         val wildsWithBounds =
           wilds.map(w => existentialArgWithBounds(w, "type " + w.name))
-        wildsWithBounds
-          .mkString(s"(${innerTypeText(q)}) forSome {", "; ", "}")
+        wildsWithBounds.mkString(s"(${innerTypeText(q)}) forSome {",
+                                 "; ",
+                                 "}")
     }
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePsiTypeBridge.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePsiTypeBridge.scala
index 60600e947b7..61e68c733da 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePsiTypeBridge.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePsiTypeBridge.scala
@@ -161,8 +161,10 @@ trait ScTypePsiTypeBridge {
                        visitedRawTypes)
           else types.Any)
       case _ if psiType != null =>
-        ScType
-          .create(psiType, project, scope, visitedRawTypes)
+        ScType.create(psiType,
+                      project,
+                      scope,
+                      visitedRawTypes)
       case _ => ScalaPsiManager.typeVariable(tp)
     }
}) diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScUndefinedType.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScUndefinedType.scala index ece1776ca41..7f9ee14251d 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScUndefinedType.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/ScUndefinedType.scala @@ -152,8 +152,9 @@ case class ScAbstractType(tpt: ScTypeParameterType, tpt .recursiveVarianceUpdateModifiable(newData, update, variance) .asInstanceOf[ScTypeParameterType], - lower - .recursiveVarianceUpdateModifiable(newData, update, -variance), + lower.recursiveVarianceUpdateModifiable(newData, + update, + -variance), upper.recursiveVarianceUpdateModifiable(newData, update, variance)) } catch { case cce: ClassCastException => throw new RecursiveUpdateException diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/nonvalue/ScMethodType.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/nonvalue/ScMethodType.scala index 9aff1760bfd..613c87b95b7 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/nonvalue/ScMethodType.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/psi/types/nonvalue/ScMethodType.scala @@ -261,8 +261,10 @@ case class ScMethodType( r match { case m: ScMethodType => if (m.params.length != params.length) return (false, undefinedSubst) - var t = Equivalence - .equivInner(m.returnType, returnType, undefinedSubst, falseUndef) + var t = Equivalence.equivInner(m.returnType, + returnType, + undefinedSubst, + falseUndef) if (!t._1) return (false, undefinedSubst) undefinedSubst = t._2 var i = 0 @@ -466,8 +468,9 @@ case class ScTypePolymorphicType(internalType: ScType, case (true, res, _) => res case (_, _, newData) => ScTypePolymorphicType( - internalType - .recursiveVarianceUpdateModifiable(newData, update, variance), + internalType.recursiveVarianceUpdateModifiable(newData, + update, + variance), typeParameters.map(tp => { TypeParameter(tp.name, tp.typeParams /* todo: ? 
*/, diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/rearranger/ScalaArrangementVisitor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/rearranger/ScalaArrangementVisitor.scala index c8af1c52f40..922d3f3ea55 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/rearranger/ScalaArrangementVisitor.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/rearranger/ScalaArrangementVisitor.scala @@ -276,8 +276,9 @@ class ScalaArrangementVisitor(parseInfo: ScalaArrangementParseInfo, entry: ScalaArrangementEntry) { genUnseparableRanges(psiRoot, entry) val top = arrangementEntries.top - val queue = unseparableRanges - .getOrElse(entry, mutable.Queue[ScalaArrangementEntry]()) + val queue = unseparableRanges.getOrElse( + entry, + mutable.Queue[ScalaArrangementEntry]()) // var unseparable = def next() = if (queue.isEmpty) null else queue.dequeue() psiRoot.getChildren.foldLeft( diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/rearranger/package.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/rearranger/package.scala index 949e0ee5c1b..749a51f4b4f 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/rearranger/package.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/rearranger/package.scala @@ -60,24 +60,34 @@ package object rearranger { //other modifiers val SEALED_ID = "SCALA_SEALED" val SEALED_UI = "sealed" - val SEALED: ArrangementSettingsToken = StdArrangementSettingsToken - .token(SEALED_ID, SEALED_UI, StdArrangementTokenType.MODIFIER) + val SEALED: ArrangementSettingsToken = StdArrangementSettingsToken.token( + SEALED_ID, + SEALED_UI, + StdArrangementTokenType.MODIFIER) val IMPLICIT_ID = "SCALA_IMPLICIT" val IMPLICIT_UI = "implicit" - val IMPLICIT: ArrangementSettingsToken = StdArrangementSettingsToken - .token(IMPLICIT_ID, IMPLICIT_UI, StdArrangementTokenType.MODIFIER) + val IMPLICIT: ArrangementSettingsToken = StdArrangementSettingsToken.token( + IMPLICIT_ID, + IMPLICIT_UI, + StdArrangementTokenType.MODIFIER) val CASE_ID = "SCALA_CASE" val CASE_UI = "case" - val CASE: ArrangementSettingsToken = StdArrangementSettingsToken - .token(CASE_ID, CASE_UI, StdArrangementTokenType.MODIFIER) + val CASE: ArrangementSettingsToken = StdArrangementSettingsToken.token( + CASE_ID, + CASE_UI, + StdArrangementTokenType.MODIFIER) val OVERRIDE_ID = "SCALA_OVERRIDE" val OVERRIDE_UI = "override" - val OVERRIDE: ArrangementSettingsToken = StdArrangementSettingsToken - .token(OVERRIDE_ID, OVERRIDE_UI, StdArrangementTokenType.MODIFIER) + val OVERRIDE: ArrangementSettingsToken = StdArrangementSettingsToken.token( + OVERRIDE_ID, + OVERRIDE_UI, + StdArrangementTokenType.MODIFIER) val LAZY_ID = "SCALA_LAZY" val LAZY_UI = "lazy" - val LAZY: ArrangementSettingsToken = StdArrangementSettingsToken - .token(LAZY_ID, LAZY_UI, StdArrangementTokenType.MODIFIER) + val LAZY: ArrangementSettingsToken = StdArrangementSettingsToken.token( + LAZY_ID, + LAZY_UI, + StdArrangementTokenType.MODIFIER) val scalaOtherModifiersByName = immutable.ListMap(SEALED_UI -> SEALED, IMPLICIT_UI -> IMPLICIT, "abstract" -> ABSTRACT, @@ -95,29 +105,39 @@ package object rearranger { //types val TYPE_ID = "SCALA_TYPE" val TYPE_UI = "type" - val TYPE: ArrangementSettingsToken = StdArrangementSettingsToken - .token(TYPE_ID, TYPE_UI, StdArrangementTokenType.ENTRY_TYPE) + val TYPE: ArrangementSettingsToken = StdArrangementSettingsToken.token( + TYPE_ID, + TYPE_UI, + StdArrangementTokenType.ENTRY_TYPE) val 
FUNCTION_ID = "SCALA_FUNCTION" val FUNCTION_UI = "function" - val FUNCTION: ArrangementSettingsToken = StdArrangementSettingsToken - .token(FUNCTION_ID, FUNCTION_UI, StdArrangementTokenType.ENTRY_TYPE) + val FUNCTION: ArrangementSettingsToken = StdArrangementSettingsToken.token( + FUNCTION_ID, + FUNCTION_UI, + StdArrangementTokenType.ENTRY_TYPE) val VAL_ID = "SCALA_VAL" val VAL_UI = "val" - val VAL: ArrangementSettingsToken = StdArrangementSettingsToken - .token(VAL_ID, VAL_UI, StdArrangementTokenType.ENTRY_TYPE) + val VAL: ArrangementSettingsToken = StdArrangementSettingsToken.token( + VAL_ID, + VAL_UI, + StdArrangementTokenType.ENTRY_TYPE) val MACRO_ID = "SCALA_MACRO" val MACRO_UI = "macro" - val MACRO: ArrangementSettingsToken = StdArrangementSettingsToken - .token(MACRO_ID, MACRO_UI, StdArrangementTokenType.ENTRY_TYPE) + val MACRO: ArrangementSettingsToken = StdArrangementSettingsToken.token( + MACRO_ID, + MACRO_UI, + StdArrangementTokenType.ENTRY_TYPE) val OBJECT_ID = "SCALA_OBJECT" val OBJECT_UI = "object" - val OBJECT: ArrangementSettingsToken = StdArrangementSettingsToken - .token(OBJECT_ID, OBJECT_UI, StdArrangementTokenType.ENTRY_TYPE) + val OBJECT: ArrangementSettingsToken = StdArrangementSettingsToken.token( + OBJECT_ID, + OBJECT_UI, + StdArrangementTokenType.ENTRY_TYPE) //this is a special token that is not used in arrangement GUI and always has canBeMatched = false val UNSEPARABLE_RANGE_ID = "SCALA_UNSEPARABLE_RANGE" val UNSEPARABLE_RANGE: ArrangementSettingsToken = - StdArrangementSettingsToken - .tokenById(UNSEPARABLE_RANGE_ID, StdArrangementTokenType.ENTRY_TYPE) + StdArrangementSettingsToken.tokenById(UNSEPARABLE_RANGE_ID, + StdArrangementTokenType.ENTRY_TYPE) //maps and sets of tokens val scalaTypesValues = immutable.HashSet(TYPE, diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureDialog.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureDialog.scala index e7028fe5ed9..9a71b60c4c8 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureDialog.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureDialog.scala @@ -158,8 +158,9 @@ class ScalaChangeSignatureDialog(val project: Project, val ed: TableCellEditor = parametersTable.getCellEditor if (ed != null) { val editorValue: AnyRef = ed.getCellEditorValue - myParametersTableModel - .setValueAtWithoutUpdate(editorValue, row, column) + myParametersTableModel.setValueAtWithoutUpdate(editorValue, + row, + column) updateSignature() } } @@ -348,8 +349,9 @@ class ScalaChangeSignatureDialog(val project: Project, if (myReturnTypeCodeFragment == null) StdType.ANY else { val fragment = myReturnTypeCodeFragment - ScalaPsiElementFactory - .createTypeFromText(fragment.getText, fragment.getContext, fragment) + ScalaPsiElementFactory.createTypeFromText(fragment.getText, + fragment.getContext, + fragment) } } diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureHandler.scala index 47b7a5a5644..1fa6d4f4052 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureHandler.scala +++ 
@@ -47,8 +47,11 @@ class ScalaChangeSignatureHandler extends ChangeSignatureHandler {
                      element: PsiElement): Unit = {
     def showErrorHint(message: String) = {
       val name = ChangeSignatureHandler.REFACTORING_NAME
-      CommonRefactoringUtil
-        .showErrorHint(project, editor, message, name, HelpID.CHANGE_SIGNATURE)
+      CommonRefactoringUtil.showErrorHint(project,
+                                          editor,
+                                          message,
+                                          name,
+                                          HelpID.CHANGE_SIGNATURE)
     }
     def isSupportedFor(fun: ScMethodLike): Boolean = {
       fun match {
@@ -91,8 +94,9 @@ class ScalaChangeSignatureHandler extends ChangeSignatureHandler {
       case _ =>
     }
-    val newMethod = SuperMethodWarningUtil
-      .checkSuperMethod(method, RefactoringBundle.message("to.refactor"))
+    val newMethod = SuperMethodWarningUtil.checkSuperMethod(
+      method,
+      RefactoringBundle.message("to.refactor"))
     unwrapMethod(newMethod) match {
       case Some(fun: ScMethodLike) =>
         if (isSupportedFor(fun)) invokeWithDialog(project, fun)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureUsageHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureUsageHandler.scala
index bb14c3d7f3f..13e28bb6f41 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureUsageHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureUsageHandler.scala
@@ -82,8 +82,9 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
       case Some(result) => result
       case None => return
     }
-    val newTypeElem = ScalaPsiElementFactory
-      .createTypeElementFromText(substType.canonicalText, element.getManager)
+    val newTypeElem = ScalaPsiElementFactory.createTypeElementFromText(
+      substType.canonicalText,
+      element.getManager)
     val oldTypeElem = element match {
       case fun: ScFunction => fun.returnTypeElement
@@ -106,8 +107,9 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
         case cp: ScClassParameter => (cp.getParent, cp)
         case ctx => (ctx, ctx.getLastChild)
       }
-      AddOnlyStrategy.withoutEditor
-        .addTypeAnnotation(substType, context, anchor)
+      AddOnlyStrategy.withoutEditor.addTypeAnnotation(substType,
+                                                      context,
+                                                      anchor)
     }
   }
@@ -145,8 +147,9 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
             NameSuggester.suggestNamesByType(param.paramType)(0)
           }
         paramsBuf = paramsBuf :+ paramName
-        val text = ScalaPsiElementFactory
-          .createExpressionFromText(paramName, arg.getManager)
+        val text = ScalaPsiElementFactory.createExpressionFromText(
+          paramName,
+          arg.getManager)
         arg.replaceExpression(text, removeParenthesis = true)
     }
     (paramsBuf, inv.getText)
@@ -166,8 +169,9 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
       } else names
     val clause = params.mkString("(", ", ", ")")
     val newFunExprText = s"$clause => $exprText"
-    val funExpr = ScalaPsiElementFactory
-      .createExpressionFromText(newFunExprText, usage.expr.getManager)
+    val funExpr = ScalaPsiElementFactory.createExpressionFromText(
+      newFunExprText,
+      usage.expr.getManager)
     val replaced = usage.expr
       .replaceExpression(funExpr, removeParenthesis = true)
       .asInstanceOf[ScFunctionExpr]
@@ -213,11 +217,12 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
     val nameId = named.nameId
     val newClauses = named match {
       case cl: ScClass =>
-        ScalaPsiElementFactory
-          .createClassParamClausesWithContext(paramsText, cl)
+        ScalaPsiElementFactory.createClassParamClausesWithContext(paramsText,
+                                                                  cl)
       case _ =>
-        ScalaPsiElementFactory
-          .createParamClausesWithContext(paramsText, named, nameId)
+        ScalaPsiElementFactory.createParamClausesWithContext(paramsText,
+                                                             named,
+                                                             nameId)
     }
     val result = usage.paramClauses match {
       case Some(p) => p.replace(newClauses)
@@ -248,8 +253,10 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
       case t: ScTuple if !hasSeveralClauses(change) =>
         val tupleText = argsText(change, usage)
         val newTuple =
-          ScalaPsiElementFactory
-            .createExpressionWithContextFromText(tupleText, infix, t)
+          ScalaPsiElementFactory.createExpressionWithContextFromText(
+            tupleText,
+            infix,
+            t)
         t.replaceExpression(newTuple, removeParenthesis = false)
       case _ =>
         val qualText = infix.getBaseExpr.getText
@@ -268,8 +275,10 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
         case Some(Seq(text)) => text
         case _ => "()"
       }
-      val expr = ScalaPsiElementFactory
-        .createExpressionWithContextFromText(argText, infix, infix.getArgExpr)
+      val expr = ScalaPsiElementFactory.createExpressionWithContextFromText(
+        argText,
+        infix,
+        infix.getArgExpr)
       infix.getArgExpr.replaceExpression(expr, removeParenthesis = true)
     }
   }
@@ -279,8 +288,10 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
     val constr = usage.constr
     val typeElem = constr.typeElement
     val text = typeElem.getText + argsText(change, usage)
-    val newConstr = ScalaPsiElementFactory
-      .createConstructorFromText(text, constr.getContext, constr)
+    val newConstr = ScalaPsiElementFactory.createConstructorFromText(
+      text,
+      constr.getContext,
+      constr)
     constr.replace(newConstr)
   }
@@ -291,8 +302,10 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
     val ref = usage.refExpr
     val text = ref.getText + argsText(change, usage)
-    val call = ScalaPsiElementFactory
-      .createExpressionWithContextFromText(text, ref.getContext, ref)
+    val call = ScalaPsiElementFactory.createExpressionWithContextFromText(
+      text,
+      ref.getContext,
+      ref)
     ref.replaceExpression(call, removeParenthesis = true)
   }
@@ -303,8 +316,10 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
     val postfix = usage.postfix
     val qualRef = ScalaPsiElementFactory.createEquivQualifiedReference(postfix)
     val text = qualRef.getText + argsText(change, usage)
-    val call = ScalaPsiElementFactory
-      .createExpressionWithContextFromText(text, postfix.getContext, postfix)
+    val call = ScalaPsiElementFactory.createExpressionWithContextFromText(
+      text,
+      postfix.getContext,
+      postfix)
     postfix.replaceExpression(call, removeParenthesis = true)
   }
@@ -313,8 +328,10 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
                                usage: MethodCallUsageInfo): Unit = {
     val call = usage.call
     val newText = usage.ref.getText + argsText(change, usage)
-    val newCall = ScalaPsiElementFactory
-      .createExpressionWithContextFromText(newText, call.getContext, call)
+    val newCall = ScalaPsiElementFactory.createExpressionWithContextFromText(
+      newText,
+      call.getContext,
+      call)
     call.replace(newCall)
   }
@@ -521,8 +538,10 @@ private[changeSignature] trait ScalaChangeSignatureUsageHandler {
     }
     def paramText(p: ParameterInfo) = {
-      val typedName = ScalaExtractMethodUtils
-        .typedName(newParamName(p), paramType(p), project, byName = false)
+      val typedName = ScalaExtractMethodUtils.typedName(newParamName(p),
+                                                        paramType(p),
+                                                        project,
+                                                        byName = false)
       val default = scalaDefaultValue(p).fold("")(" = " + _)
      val keywordsAndAnnots = p match {
        case spi: ScalaParameterInfo => spi.keywordsAndAnnotations
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureUsageProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureUsageProcessor.scala
index 9d83ae1c957..5c572f82f92 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureUsageProcessor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaChangeSignatureUsageProcessor.scala
@@ -148,8 +148,10 @@ class ScalaChangeSignatureUsageProcessor
           element.delete()
         case _: ScVariableDeclaration | _: ScValueDeclaration =>
           val newElement =
-            ScalaPsiElementFactory
-              .createDeclarationFromText(text, element.getContext, element)
+            ScalaPsiElementFactory.createDeclarationFromText(
+              text,
+              element.getContext,
+              element)
           element.getParent.addAfter(newElement, element)
           element.delete()
         case _ =>
@@ -230,8 +232,9 @@ class ScalaChangeSignatureUsageProcessor
       case ScalaNamedElementUsageInfo(u: OverriderValUsageInfo) =>
         ConflictsUtil.addBindingPatternConflicts(u.namedElement, info, result)
       case javaOverriderUsage: OverriderUsageInfo =>
-        ConflictsUtil
-          .addJavaOverriderConflicts(javaOverriderUsage, info, result)
+        ConflictsUtil.addJavaOverriderConflicts(javaOverriderUsage,
+                                                info,
+                                                result)
       case p: PatternUsageInfo =>
         ConflictsUtil.addUnapplyUsagesConflicts(p, info, result)
       case _ =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterInfo.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterInfo.scala
index 2a1e3d78025..ccf3952676c 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterInfo.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterInfo.scala
@@ -63,10 +63,12 @@ class ScalaParameterInfo(@BeanProperty var name: String,
       val functionType = ScFunctionType(scType, Seq())(project, allScope)
       ScType.toPsi(functionType, project, allScope)
     } else if (isRepeatedParameter) {
-      val seqType = ScDesignatorType
-        .fromClassFqn("scala.collection.Seq", project, allScope)
-      ScType
-        .toPsi(ScParameterizedType(seqType, Seq(scType)), project, allScope)
+      val seqType = ScDesignatorType.fromClassFqn("scala.collection.Seq",
+                                                  project,
+                                                  allScope)
+      ScType.toPsi(ScParameterizedType(seqType, Seq(scType)),
+                   project,
+                   allScope)
     } else ScType.toPsi(scType, project, allScope)
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterTableModel.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterTableModel.scala
index d1e1fc9ea99..e9c2bb2f572 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterTableModel.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterTableModel.scala
@@ -68,8 +68,8 @@ class ScalaParameterTableModel(typeContext: PsiElement,
         .forceRootHighlighting(_, FileHighlightingSetting.SKIP_HIGHLIGHTING))
     paramTypeCodeFragment.setContext(typeContext.getParent, typeContext)
-    defaultValueCodeFragment
-      .setContext(defaultValueContext.getParent, defaultValueContext)
+    defaultValueCodeFragment.setContext(defaultValueContext.getParent,
+                                        defaultValueContext)
     defaultValueCodeFragment.setVisibilityChecker(
       JavaCodeFragment.VisibilityChecker.EVERYTHING_VISIBLE)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterTableModelItem.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterTableModelItem.scala
index c7dc3f5d067..36cc2d18d70 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterTableModelItem.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterTableModelItem.scala
@@ -40,8 +40,9 @@ class ScalaParameterTableModelItem(parameter: ScalaParameterInfo,
     }
     if (typeText.isEmpty) {
-      problems += RefactoringBundle
-        .message("changeSignature.no.type.for.parameter", parameter.getName)
+      problems += RefactoringBundle.message(
+        "changeSignature.no.type.for.parameter",
+        parameter.getName)
       return
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/delete/SafeDeleteProcessorUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/delete/SafeDeleteProcessorUtil.scala
index 63610cf53e0..385e7823445 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/delete/SafeDeleteProcessorUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/delete/SafeDeleteProcessorUtil.scala
@@ -243,8 +243,8 @@ object SafeDeleteProcessorUtil {
             !constructorsToRefs.containsKey(overridingConstructor)) {
           val overridingConstructorReferences: util.Collection[PsiReference] =
             referenceSearch(overridingConstructor).findAll
-          constructorsToRefs
-            .put(overridingConstructor, overridingConstructorReferences)
+          constructorsToRefs.put(overridingConstructor,
+                                 overridingConstructorReferences)
           passConstructors.add(overridingConstructor)
         }
       }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/InnerClassSettings.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/InnerClassSettings.scala
index 78198992a3e..aa535b1e5ab 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/InnerClassSettings.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/InnerClassSettings.scala
@@ -14,8 +14,10 @@ case class InnerClassSettings(needClass: Boolean,
     val tp = output.returnType
     val typeText =
       if (canonTextForTypes) tp.canonicalText else tp.presentableText
-    val typed = ScalaExtractMethodUtils
-      .typedName(output.paramName, typeText, output.fromElement.getProject)
+    val typed = ScalaExtractMethodUtils.typedName(
+      output.paramName,
+      typeText,
+      output.fromElement.getProject)
     if (isCase) typed else s"val $typed"
   }
   val paramsText = outputs.map(paramText).mkString("(", ", ", ")")
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodHandler.scala
index 296b02e4ed5..2915ea01a59 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodHandler.scala
@@ -96,8 +96,11 @@ class ScalaExtractMethodHandler extends RefactoringActionHandler {
    val elements: Seq[PsiElement] =
      ScalaRefactoringUtil.selectedElements(editor, file, trimComments = false)
-    val hasWarnings = ScalaRefactoringUtil
-      .showNotPossibleWarnings(elements, project, editor, REFACTORING_NAME)
+    val hasWarnings = ScalaRefactoringUtil.showNotPossibleWarnings(
+      elements,
+      project,
+      editor,
+      REFACTORING_NAME)
     if (hasWarnings) return
     def checkLastReturn(elem: PsiElement): Boolean = {
@@ -115,8 +118,8 @@ class ScalaExtractMethodHandler extends RefactoringActionHandler {
     }
     def returnType: Option[ScType] = {
-      val fun = PsiTreeUtil
-        .getParentOfType(elements.head, classOf[ScFunctionDefinition])
+      val fun = PsiTreeUtil.getParentOfType(elements.head,
+                                            classOf[ScFunctionDefinition])
       if (fun == null) return None
       var result: Option[ScType] = None
       val visitor = new ScalaRecursiveElementVisitor {
@@ -357,8 +360,7 @@ class ScalaExtractMethodHandler extends RefactoringActionHandler {
     def local(text: String) = ScalaBundle.message("extract.local.method", text)
     element.getParent match {
       case tbody: ScTemplateBody =>
-        PsiTreeUtil
-          .getParentOfType(tbody, classOf[ScTemplateDefinition]) match {
+        PsiTreeUtil.getParentOfType(tbody, classOf[ScTemplateDefinition]) match {
           case o: ScObject => s"Extract method to object ${o.name}"
           case c: ScClass => s"Extract method to class ${c.name}"
           case t: ScTrait => s"Extract method to trait ${t.name}"
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodUtils.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodUtils.scala
index bec0cf20794..ef1a5a91f38 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodUtils.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodUtils.scala
@@ -153,8 +153,9 @@ object ScalaExtractMethodUtils {
             )
           case None => "" //should not occur
         }
-        val retElem = ScalaPsiElementFactory
-          .createExpressionFromText(s"return $newText", ret.getManager)
+        val retElem = ScalaPsiElementFactory.createExpressionFromText(
+          s"return $newText",
+          ret.getManager)
         ret.replace(retElem)
       }
     }
@@ -523,8 +524,12 @@ object ScalaExtractMethodUtils {
       }
     val expr =
       ScalaPsiElementFactory.createExpressionFromText(exprText, manager)
-    val declaration = ScalaPsiElementFactory
-      .createDeclaration(pattern, "", isVariable = !isVal, expr, manager)
+    val declaration = ScalaPsiElementFactory.createDeclaration(pattern,
+                                                               "",
+                                                               isVariable =
+                                                                 !isVal,
+                                                               expr,
+                                                               manager)
     val result = elements.head.replace(declaration)
     TypeAdjuster.markToAdjust(result)
     result
@@ -567,8 +572,9 @@ object ScalaExtractMethodUtils {
     if (allVals || allVars) {
       val patternArgsText = outputTypedNames.mkString("(", ", ", ")")
       val patternText = ics.className + patternArgsText
-      val expr = ScalaPsiElementFactory
-        .createExpressionFromText(mFreshName, manager)
+      val expr = ScalaPsiElementFactory.createExpressionFromText(
+        mFreshName,
+        manager)
       val stmt = ScalaPsiElementFactory.createDeclaration(patternText,
                                                           "",
                                                           isVariable =
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractTrait/ExtractSuperUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractTrait/ExtractSuperUtil.scala
index a340b63643a..8b9d4215b8d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractTrait/ExtractSuperUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractTrait/ExtractSuperUtil.scala
@@ -104,15 +104,17 @@ object ExtractSuperUtil {
     val templParents = oldExtBlock.templateParents match {
       case Some(tp: ScTemplateParents) =>
         val tpText = s"${tp.getText} with $text"
-        val (_, newTp) = ScalaPsiElementFactory
-          .createClassTemplateParents(tpText, clazz.getManager)
+        val (_, newTp) = ScalaPsiElementFactory.createClassTemplateParents(
+          tpText,
+          clazz.getManager)
         tp.replace(newTp).asInstanceOf[ScTemplateParents]
       case None =>
         val (extKeyword, newTp) =
-          ScalaPsiElementFactory
-            .createClassTemplateParents(text, clazz.getManager)
-        oldExtBlock
-          .addRangeBefore(extKeyword, newTp, oldExtBlock.getFirstChild)
+          ScalaPsiElementFactory.createClassTemplateParents(text,
+                                                            clazz.getManager)
+        oldExtBlock.addRangeBefore(extKeyword,
+                                   newTp,
+                                   oldExtBlock.getFirstChild)
         oldExtBlock.templateParents.get
     }
     templParents.typeElementsWithoutConstructor.foreach {
@@ -161,8 +163,9 @@ object ExtractSuperUtil {
     val dir: PsiDirectory =
       ExtractSuperUtil.getDirUnderSameSourceRoot(sourceClass, dirs)
-    val cantCreateFile: String = RefactoringMessageUtil
-      .checkCanCreateFile(dir, targetClassName + ".scala")
+    val cantCreateFile: String = RefactoringMessageUtil.checkCanCreateFile(
+      dir,
+      targetClassName + ".scala")
     if (cantCreateFile != null) return cantCreateFile
     null
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractTrait/ScalaExtractTraitHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractTrait/ScalaExtractTraitHandler.scala
index 733314ea6c5..fb108413117 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractTrait/ScalaExtractTraitHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/extractTrait/ScalaExtractTraitHandler.scala
@@ -58,8 +58,9 @@ class ScalaExtractTraitHandler extends RefactoringActionHandler {
       case Array(clazz: ScTemplateDefinition) => clazz
       case _ =>
         val parent = PsiTreeUtil.findCommonParent(elements: _*)
-        PsiTreeUtil
-          .getParentOfType(parent, classOf[ScTemplateDefinition], false)
+        PsiTreeUtil.getParentOfType(parent,
+                                    classOf[ScTemplateDefinition],
+                                    false)
     }
     if (dataContext != null) {
@@ -90,8 +91,10 @@ class ScalaExtractTraitHandler extends RefactoringActionHandler {
     if (messages.nonEmpty) throw new RuntimeException(messages.mkString("\n"))
     inWriteCommandAction(project, "Extract trait") {
       val traitText = "trait ExtractedTrait {\n\n}"
-      val newTrt = ScalaPsiElementFactory
-        .createTemplateDefinitionFromText(traitText, clazz.getContext, clazz)
+      val newTrt = ScalaPsiElementFactory.createTemplateDefinitionFromText(
+        traitText,
+        clazz.getContext,
+        clazz)
       val newTrtAdded = clazz match {
         case anon: ScNewTemplateDefinition =>
           val tBody =
@@ -120,8 +123,9 @@ class ScalaExtractTraitHandler extends RefactoringActionHandler {
     val extractInfo = new ExtractInfo(clazz, memberInfos)
     extractInfo.collect()
-    val isOk = ExtractSuperClassUtil
-      .showConflicts(dialog, extractInfo.conflicts, clazz.getProject)
+    val isOk = ExtractSuperClassUtil.showConflicts(dialog,
+                                                   extractInfo.conflicts,
+                                                   clazz.getProject)
     if (!isOk) return
     val name = dialog.getTraitName
@@ -151,8 +155,10 @@ class ScalaExtractTraitHandler extends RefactoringActionHandler {
       case Some(selfTpe) =>
         val traitText = s"trait ${trt.name} {\n$selfTpe\n}"
         val dummyTrait =
-          ScalaPsiElementFactory
-            .createTemplateDefinitionFromText(traitText, trt.getParent, trt)
+          ScalaPsiElementFactory.createTemplateDefinitionFromText(
+            traitText,
+            trt.getParent,
+            trt)
         val selfTypeElem = dummyTrait.extendsBlock.selfTypeElement.get
         val extendsBlock = trt.extendsBlock
         val templateBody = extendsBlock.templateBody match {
@@ -344,8 +350,9 @@ class ScalaExtractTraitHandler extends RefactoringActionHandler {
       classesForSelfType.foreach {
         case cl: PsiClass if cl.getTypeParameters.nonEmpty =>
-          val message = ScalaBundle
-            .message("type.parameters.for.self.type.not.supported", cl.name)
+          val message =
+            ScalaBundle.message("type.parameters.for.self.type.not.supported",
+                                cl.name)
           conflicts.putValue(cl, message)
         case _ =>
       }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/inline/ScalaInlineHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/inline/ScalaInlineHandler.scala
index 5cda8c9dcba..ed2f355770e 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/inline/ScalaInlineHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/inline/ScalaInlineHandler.scala
@@ -80,8 +80,7 @@ class ScalaInlineHandler extends InlineHandler {
     element match {
       case rp: ScBindingPattern =>
-        PsiTreeUtil
-          .getParentOfType(rp, classOf[ScDeclaredElementsHolder]) match {
+        PsiTreeUtil.getParentOfType(rp, classOf[ScDeclaredElementsHolder]) match {
           case v @ (_: ScValue | _: ScVariable)
               if v.declaredElements.length == 1 =>
             removeElementWithNonSignificantSibilings(v)
@@ -100,8 +99,7 @@ class ScalaInlineHandler extends InlineHandler {
       settings: InlineHandler.Settings): InlineHandler.Inliner = {
     val replacementValue = element match {
       case rp: ScBindingPattern =>
-        PsiTreeUtil
-          .getParentOfType(rp, classOf[ScDeclaredElementsHolder]) match {
+        PsiTreeUtil.getParentOfType(rp, classOf[ScDeclaredElementsHolder]) match {
          case v @ ScPatternDefinition.expr(e)
              if v.declaredElements == Seq(element) =>
            e.getText
@@ -151,8 +149,10 @@ class ScalaInlineHandler extends InlineHandler {
         val project = newValue.getProject
         val manager = FileEditorManager.getInstance(project)
         val editor = manager.getSelectedTextEditor
-        occurrenceHighlighters = ScalaRefactoringUtil
-          .highlightOccurrences(project, Array[PsiElement](newValue), editor)
+        occurrenceHighlighters = ScalaRefactoringUtil.highlightOccurrences(
+          project,
+          Array[PsiElement](newValue),
+          editor)
         CodeStyleManager
           .getInstance(project)
           .reformatRange(
@@ -325,8 +325,10 @@ class ScalaInlineHandler extends InlineHandler {
           .findAll
           .asScala
           .forall { ref =>
-            member.containingClass == null || PsiTreeUtil
-              .isAncestor(member.containingClass, ref.getElement, true)
+            member.containingClass == null || PsiTreeUtil.isAncestor(
+              member.containingClass,
+              ref.getElement,
+              true)
           }
       case _ => true
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceField/ScalaIntroduceFieldFromExpressionHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceField/ScalaIntroduceFieldFromExpressionHandler.scala
index 6e4fafabcfa..449410eff61 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceField/ScalaIntroduceFieldFromExpressionHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceField/ScalaIntroduceFieldFromExpressionHandler.scala
@@ -143,8 +143,10 @@ class ScalaIntroduceFieldFromExpressionHandler
    val manager = aClass.getManager
     val name = settings.name
     val typeName = Option(settings.scType).map(_.canonicalText).getOrElse("")
-    val replacedOccurences = ScalaRefactoringUtil
-      .replaceOccurences(occurrencesToReplace, name, ifc.file)
+    val replacedOccurences = ScalaRefactoringUtil.replaceOccurences(
+      occurrencesToReplace,
+      name,
+      ifc.file)
     val anchor =
       anchorForNewDeclaration(expression, replacedOccurences, aClass)
@@ -238,8 +240,10 @@ class ScalaIntroduceFieldFromExpressionHandler
     val occCount = ifc.occurrences.length
     // Add occurrences highlighting
     if (occCount > 1)
-      occurrenceHighlighters = ScalaRefactoringUtil
-        .highlightOccurrences(ifc.project, ifc.occurrences, ifc.editor)
+      occurrenceHighlighters = ScalaRefactoringUtil.highlightOccurrences(
+        ifc.project,
+        ifc.occurrences,
+        ifc.editor)
     val dialog = new ScalaIntroduceFieldDialog(ifc, settings)
     dialog.show()
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceField/ScalaIntroduceFieldHandlerBase.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceField/ScalaIntroduceFieldHandlerBase.scala
index 19dd48f4acb..ab872e2d40a 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceField/ScalaIntroduceFieldHandlerBase.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceField/ScalaIntroduceFieldHandlerBase.scala
@@ -88,8 +88,9 @@ abstract class ScalaIntroduceFieldHandlerBase
                               expr: ScExpression,
                               occurrences: Array[TextRange],
                               aClass: ScTemplateDefinition): PsiElement = {
-    val commonParent = ScalaRefactoringUtil
-      .commonParent(aClass.getContainingFile, occurrences: _*)
+    val commonParent = ScalaRefactoringUtil.commonParent(
+      aClass.getContainingFile,
+      occurrences: _*)
     val firstOccOffset = occurrences.map(_.getStartOffset).min
     val anchor = ScalaRefactoringUtil
       .statementsAndMembersInClass(aClass)
@@ -151,8 +152,9 @@ object ScalaIntroduceFieldHandlerBase {
     if (parExpr == null) return None
     val container: PsiElement = ScalaRefactoringUtil.container(parExpr, file)
     val needBraces =
-      !parExpr.isInstanceOf[ScBlock] && ScalaRefactoringUtil
-        .needBraces(parExpr, ScalaRefactoringUtil.nextParent(parExpr, file))
+      !parExpr.isInstanceOf[ScBlock] && ScalaRefactoringUtil.needBraces(
+        parExpr,
+        ScalaRefactoringUtil.nextParent(parExpr, file))
     val parent =
       if (needBraces) {
         firstRange = firstRange.shiftRight(1)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterDialog.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterDialog.scala
index 795798a3c1b..189b0a11dc8 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterDialog.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterDialog.scala
@@ -208,8 +208,8 @@ class ScalaIntroduceParameterDialog(project: Project,
             defaultForIntroducedTextField.getText.trim)
       }
     })
-    IJSwingUtilities
-      .adjustComponentsOnMac(label, defaultForIntroducedTextField)
+    IJSwingUtilities.adjustComponentsOnMac(label,
+                                           defaultForIntroducedTextField)
     panel.add(defaultForIntroducedTextField, BorderLayout.CENTER)
     val optionsPanel = new JPanel(new BorderLayout())
     replaceOccurrencesChb = new JCheckBox("Replace all occurrences")
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterHandler.scala
index f188b6005f5..ca9968efa85 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterHandler.scala
@@ -178,8 +178,11 @@ class ScalaIntroduceParameterHandler
       (selModel.getSelectionStart, selModel.getSelectionEnd)
     ScalaRefactoringUtil.checkFile(file, project, editor, REFACTORING_NAME)
-    val exprWithTypes = ScalaRefactoringUtil
-      .getExpression(project, editor, file, startOffset, endOffset)
+    val exprWithTypes = ScalaRefactoringUtil.getExpression(project,
+                                                           editor,
+                                                           file,
+                                                           startOffset,
+                                                           endOffset)
     val elems = exprWithTypes match {
       case Some((e, _)) => Seq(e)
       case None =>
@@ -188,8 +191,11 @@ class ScalaIntroduceParameterHandler
                                               trimComments = false)
     }
-    val hasWarnings = ScalaRefactoringUtil
-      .showNotPossibleWarnings(elems, project, editor, REFACTORING_NAME)
+    val hasWarnings = ScalaRefactoringUtil.showNotPossibleWarnings(
+      elems,
+      project,
+      editor,
+      REFACTORING_NAME)
     if (hasWarnings) return None
     if (haveReturnStmts(elems)) {
       showErrorHint(
@@ -269,8 +275,10 @@ class ScalaIntroduceParameterHandler
           ScalaRefactoringUtil.unparExpr(expr),
           occurrencesScope)
         if (occurrences.length > 1)
-          occurrenceHighlighters = ScalaRefactoringUtil
-            .highlightOccurrences(project, occurrences, editor)
+          occurrenceHighlighters = ScalaRefactoringUtil.highlightOccurrences(
+            project,
+            occurrences,
+            editor)
         (occurrences, expr.getTextRange)
       case _ =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/IntroduceExpressions.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/IntroduceExpressions.scala
index a8616eaf278..4968c48bea1 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/IntroduceExpressions.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/IntroduceExpressions.scala
@@ -55,8 +55,10 @@ trait IntroduceExpressions { this: ScalaIntroduceVariableHandler =>
     UsageTrigger.trigger(ScalaBundle.message("introduce.variable.id"))
     PsiDocumentManager.getInstance(project).commitAllDocuments()
-    ScalaRefactoringUtil
-      .checkFile(file, project, editor, INTRODUCE_VARIABLE_REFACTORING_NAME)
+    ScalaRefactoringUtil.checkFile(file,
+                                   project,
+                                   editor,
+                                   INTRODUCE_VARIABLE_REFACTORING_NAME)
     val (expr: ScExpression, types: Array[ScType]) = ScalaRefactoringUtil
       .getExpression(project, editor, file, startOffset, endOffset)
       .getOrElse(
@@ -325,8 +327,9 @@ trait IntroduceExpressions { this: ScalaIntroduceVariableHandler =>
       oneLineSelected && !insideExpression
     }
-    val revertInfo = ScalaRefactoringUtil
-      .RevertInfo(file.getText, editor.getCaretModel.getOffset)
+    val revertInfo = ScalaRefactoringUtil.RevertInfo(
+      file.getText,
+      editor.getCaretModel.getOffset)
     editor.putUserData(ScalaIntroduceVariableHandler.REVERT_INFO, revertInfo)
     val typeName = if (varType != null) varType.canonicalText else ""
@@ -579,8 +582,10 @@ trait IntroduceExpressions { this: ScalaIntroduceVariableHandler =>
                          endOffset: Int,
                          replaceAll: Boolean) {
     PsiDocumentManager.getInstance(project).commitAllDocuments()
-    ScalaRefactoringUtil
-      .checkFile(file, project, editor, INTRODUCE_VARIABLE_REFACTORING_NAME)
+    ScalaRefactoringUtil.checkFile(file,
+                                   project,
+                                   editor,
+                                   INTRODUCE_VARIABLE_REFACTORING_NAME)
     val (expr: ScExpression, types: Array[ScType]) = ScalaRefactoringUtil
       .getExpression(project, editor, file, startOffset, endOffset)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/IntroduceTypeAlias.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/IntroduceTypeAlias.scala
index 8e07fc3044b..5423ebb0854 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/IntroduceTypeAlias.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/IntroduceTypeAlias.scala
@@ -75,8 +75,10 @@ trait IntroduceTypeAlias { this: ScalaIntroduceVariableHandler =>
     UsageTrigger.trigger(ScalaBundle.message("introduce.type.alias.id"))
     PsiDocumentManager.getInstance(project).commitAllDocuments()
-    ScalaRefactoringUtil
-      .checkFile(file, project, editor, INTRODUCE_TYPEALIAS_REFACTORING_NAME)
+    ScalaRefactoringUtil.checkFile(file,
+                                   project,
+                                   editor,
+                                   INTRODUCE_TYPEALIAS_REFACTORING_NAME)
     val typeElement: ScTypeElement = ScalaRefactoringUtil
       .checkTypeElement(inTypeElement)
@@ -319,16 +321,19 @@ trait IntroduceTypeAlias { this: ScalaIntroduceVariableHandler =>
           typeElement.getContext,
           typeElement)
-      val resultTypeAlias = ScalaPsiUtil
-        .addTypeAliasBefore(definition, parent, getAhchor(parent, typeElement))
+      val resultTypeAlias = ScalaPsiUtil.addTypeAliasBefore(
+        definition,
+        parent,
+        getAhchor(parent, typeElement))
       ScalaPsiUtil.adjustTypes(resultTypeAlias,
                                addImports = true,
                                useTypeAliases = false)
       resultTypeAlias
     }
-    val revertInfo = ScalaRefactoringUtil
-      .RevertInfo(file.getText, editor.getCaretModel.getOffset)
+    val revertInfo = ScalaRefactoringUtil.RevertInfo(
+      file.getText,
+      editor.getCaretModel.getOffset)
     editor.putUserData(ScalaIntroduceVariableHandler.REVERT_INFO, revertInfo)
     val parent = scope match {
@@ -458,8 +463,10 @@ trait IntroduceTypeAlias { this: ScalaIntroduceVariableHandler =>
                                 typeAlias: ScTypeAlias) = {
     def replaceHelper(typeElement: ScTypeElement,
                       inName: String): ScTypeElement = {
-      val replacement = ScalaPsiElementFactory
-        .createTypeElementFromText(inName, typeElement.getContext, typeElement)
+      val replacement = ScalaPsiElementFactory.createTypeElementFromText(
+        inName,
+        typeElement.getContext,
+        typeElement)
       //remove parethesis around typeElement
       if (typeElement.getParent.isInstanceOf[ScParenthesisedTypeElement]) {
         typeElement.getNextSibling.delete()
@@ -525,8 +532,9 @@ trait IntroduceTypeAlias { this: ScalaIntroduceVariableHandler =>
       JListCompatibility.addElement(model, element)
     }
     val list = JListCompatibility.createJListFromModel(model)
-    JListCompatibility
-      .setCellRenderer(list, new DefaultListCellRendererAdapter {
+    JListCompatibility.setCellRenderer(
+      list,
+      new DefaultListCellRendererAdapter {
         def getListCellRendererComponentAdapter(
             container: JListCompatibility.JListContainer,
             value: Object,
@@ -627,8 +635,10 @@ trait IntroduceTypeAlias { this: ScalaIntroduceVariableHandler =>
     }
     if (occurrences.length > 1)
-      occurrenceHighlighters = ScalaRefactoringUtil
-        .highlightOccurrences(project, occurrences.map(_.getTextRange), editor)
+      occurrenceHighlighters = ScalaRefactoringUtil.highlightOccurrences(
+        project,
+        occurrences.map(_.getTextRange),
+        editor)
    val dialog = new ScalaIntroduceTypeAliasDialog(project,
                                                   typeElement,
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaInplaceTypeAliasIntroducer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaInplaceTypeAliasIntroducer.scala
index bd0673e61e9..258da46618c 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaInplaceTypeAliasIntroducer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaInplaceTypeAliasIntroducer.scala
@@ -114,8 +114,9 @@ class ScalaInplaceTypeAliasIntroducer(scNamedElement: ScNamedElement,
       val myFile: PsiFile = PsiDocumentManager
         .getInstance(myEditor.getProject)
         .getPsiFile(myEditor.getDocument)
-      myEditor.getDocument
-        .replaceString(0, myFile.getTextLength, revertInfo.fileText)
+      myEditor.getDocument.replaceString(0,
+                                         myFile.getTextLength,
+                                         revertInfo.fileText)
     }
     myEditor.getCaretModel.moveToOffset(revertInfo.caretOffset)
     myEditor.getScrollingModel.scrollToCaret(ScrollType.MAKE_VISIBLE)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaInplaceVariableIntroducer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaInplaceVariableIntroducer.scala
index 16ff332d688..00730b66a76 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaInplaceVariableIntroducer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaInplaceVariableIntroducer.scala
@@ -276,8 +276,8 @@ class ScalaInplaceVariableIntroducer(project: Project,
               .createExpressionFromText("1 + 1", myFile.getManager)
               .findElementAt(1)
             val newWhiteSpace = holder.addBefore(whiteSpace, assign)
-            holder.getNode
-              .removeRange(colon.getNode, newWhiteSpace.getNode)
+            holder.getNode.removeRange(colon.getNode,
+                                       newWhiteSpace.getNode)
             setDeclaration(holder)
             commitDocument()
           case enum: ScEnumerator
@@ -376,8 +376,9 @@ class ScalaInplaceVariableIntroducer(project: Project,
         myEditor.getUserData(ScalaIntroduceVariableHandler.REVERT_INFO)
       if (revertInfo != null) {
         extensions.inWriteAction {
-          myEditor.getDocument
-            .replaceString(0, myFile.getTextLength, revertInfo.fileText)
+          myEditor.getDocument.replaceString(0,
+                                             myFile.getTextLength,
+                                             revertInfo.fileText)
         }
         myEditor.getCaretModel.moveToOffset(revertInfo.caretOffset)
         myEditor.getScrollingModel.scrollToCaret(ScrollType.MAKE_VISIBLE)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaIntroduceVariableHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaIntroduceVariableHandler.scala
index 30ba4eb99a2..ddc802a51e2 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaIntroduceVariableHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaIntroduceVariableHandler.scala
@@ -41,8 +41,11 @@ class ScalaIntroduceVariableHandler
     def selectionEnd = editor.getSelectionModel.getSelectionEnd
     val selectedElement: Option[PsiElement] = {
-      val typeElem = ScalaRefactoringUtil
-        .getTypeElement(project, editor, file, selectionStart, selectionEnd)
+      val typeElem = ScalaRefactoringUtil.getTypeElement(project,
+                                                         editor,
+                                                         file,
+                                                         selectionStart,
+                                                         selectionEnd)
       val expr =
         ScalaRefactoringUtil
           .getExpression(project, editor, file, selectionStart, selectionEnd)
          .map(_._1)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScopeSuggester.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScopeSuggester.scala
index 099516a39fe..38a859cfa4a 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScopeSuggester.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScopeSuggester.scala
@@ -82,8 +82,7 @@ object ScopeSuggester {
     val name = parent match {
       case fileType: ScalaFile => "file " + fileType.getName
       case _ =>
-        PsiTreeUtil
-          .getParentOfType(parent, classOf[ScTemplateDefinition]) match {
+        PsiTreeUtil.getParentOfType(parent, classOf[ScTemplateDefinition]) match {
          case classType: ScClass =>
            "class " + classType.name
          case objectType: ScObject =>
@@ -168,8 +167,8 @@ object ScopeSuggester {
        })
       if (companion.isDefined)
-        ScalaRefactoringUtil
-          .getTypeElementOccurrences(typeElement, companion.get)
+        ScalaRefactoringUtil.getTypeElementOccurrences(typeElement,
+                                                       companion.get)
       else Array[ScTypeElement]()
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/move/ScalaMoveClassesOrPackagesHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/move/ScalaMoveClassesOrPackagesHandler.scala
index badf110bd54..06688832664 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/move/ScalaMoveClassesOrPackagesHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/move/ScalaMoveClassesOrPackagesHandler.scala
@@ -84,11 +84,12 @@ class ScalaMoveClassesOrPackagesHandler
       return
     }
     val initialTargetPackageName: String =
-      MoveClassesOrPackagesImpl
-        .getInitialTargetPackageName(initialTargetElement, adjustedElements)
+      MoveClassesOrPackagesImpl.getInitialTargetPackageName(
+        initialTargetElement,
+        adjustedElements)
     val initialTargetDirectory: PsiDirectory =
-      MoveClassesOrPackagesImpl
-        .getInitialTargetDirectory(initialTargetElement, adjustedElements)
+      MoveClassesOrPackagesImpl.getInitialTargetDirectory(initialTargetElement,
+                                                          adjustedElements)
     val isTargetDirectoryFixed: Boolean = initialTargetDirectory == null
     val searchTextOccurences: Boolean = adjustedElements.exists(
       TextOccurrencesUtil.isSearchTextOccurencesEnabled)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameLightProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameLightProcessor.scala
index 99ca39f3fd8..0e01cdbf280 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameLightProcessor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameLightProcessor.scala
@@ -76,7 +76,9 @@ class RenameLightProcessor extends RenamePsiElementProcessor {
                              newName: String,
                              usages: Array[UsageInfo],
                              listener: RefactoringElementListener) {
-    ScalaRenameUtil
-      .doRenameGenericNamedElement(element, newName, usages, listener)
+    ScalaRenameUtil.doRenameGenericNamedElement(element,
+                                                newName,
+                                                usages,
+                                                listener)
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaClassProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaClassProcessor.scala
index 9f0174ca5ae..c967aeceda2 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaClassProcessor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaClassProcessor.scala
@@ -135,8 +135,10 @@ class RenameScalaClassProcessor
                              newName: String,
                              usages: Array[UsageInfo],
                              listener: RefactoringElementListener) {
-    ScalaRenameUtil
-      .doRenameGenericNamedElement(element, newName, usages, listener)
+    ScalaRenameUtil.doRenameGenericNamedElement(element,
+                                                newName,
+                                                usages,
+                                                listener)
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaMethodProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaMethodProcessor.scala
index de06a78a2ba..e4fbb405bf9 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaMethodProcessor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaMethodProcessor.scala
@@ -79,8 +79,10 @@ class RenameScalaMethodProcessor
                              newName: String,
                              usages: Array[UsageInfo],
                              listener: RefactoringElementListener) {
-    ScalaRenameUtil
-      .doRenameGenericNamedElement(psiElement, newName, usages, listener)
+    ScalaRenameUtil.doRenameGenericNamedElement(psiElement,
+                                                newName,
+                                                usages,
+                                                listener)
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaTypeAliasProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaTypeAliasProcessor.scala
index ac2246c8b20..d550b173b8d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaTypeAliasProcessor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaTypeAliasProcessor.scala
@@ -38,13 +38,15 @@ class RenameScalaTypeAliasProcessor
       case named: ScNamedElement => named
       case _ => return
     }
-    RenameSuperMembersUtil
-      .chooseAndProcessSuper(named, new PsiElementProcessor[PsiNamedElement] {
+    RenameSuperMembersUtil.chooseAndProcessSuper(
+      named,
+      new PsiElementProcessor[PsiNamedElement] {
        def execute(named: PsiNamedElement): Boolean = {
          renameCallback.pass(named)
          false
        }
-      }, editor)
+      },
+      editor)
   }
   override def prepareRenaming(element: PsiElement,
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaVariableProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaVariableProcessor.scala
index de3644af2b4..fc87ae96c29 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaVariableProcessor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameScalaVariableProcessor.scala
@@ -99,8 +99,10 @@ class RenameScalaVariableProcessor
                          deep = true)) {
       val overriderName = elem.name
       val baseName = namedElement.name
-      val newOverriderName = RefactoringUtil
-        .suggestNewOverriderName(overriderName, baseName, newName)
+      val newOverriderName = RefactoringUtil.suggestNewOverriderName(
+        overriderName,
+        baseName,
+        newName)
       if (newOverriderName != null) {
         allRenames.put(elem, newOverriderName)
         addBeanMethods(elem, newOverriderName)
@@ -129,20 +131,24 @@ class RenameScalaVariableProcessor
     val named = element match {
       case named: ScNamedElement => named;
       case _ => return
     }
-    RenameSuperMembersUtil
-      .chooseAndProcessSuper(named, new PsiElementProcessor[PsiNamedElement] {
+    RenameSuperMembersUtil.chooseAndProcessSuper(
+      named,
+      new PsiElementProcessor[PsiNamedElement] {
        def execute(named: PsiNamedElement): Boolean = {
          renameCallback.pass(named)
          false
        }
-      }, editor)
+      },
+      editor)
   }
   override def renameElement(element: PsiElement,
                             newName: String,
                             usages: Array[UsageInfo],
                             listener: RefactoringElementListener) {
-    ScalaRenameUtil
-      .doRenameGenericNamedElement(element, newName, usages, listener)
+    ScalaRenameUtil.doRenameGenericNamedElement(element,
+                                                newName,
+                                                usages,
+                                                listener)
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/ScalaRenameUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/ScalaRenameUtil.scala
index 4c31528ca9c..ce294263db3 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/ScalaRenameUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/ScalaRenameUtil.scala
@@ -188,8 +188,10 @@ object ScalaRenameUtil {
     }
     modified.foreach {
       case UsagesWithName(name, usagez) if usagez.nonEmpty =>
-        RenameUtil
-          .doRenameGenericNamedElement(namedElement, name, usagez, listener)
+        RenameUtil.doRenameGenericNamedElement(namedElement,
+                                               name,
+                                               usagez,
+                                               listener)
       case _ =>
     }
     //to guarantee correct name of namedElement itself
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaInplaceRenameHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaInplaceRenameHandler.scala
index af479500398..928b734c50b 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaInplaceRenameHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaInplaceRenameHandler.scala
@@ -53,8 +53,10 @@ trait ScalaInplaceRenameHandler {
                            project: Project,
                            nameSuggestionContext: PsiElement,
                            editor: Editor): Unit = {
-    PsiElementRenameHandler
-      .rename(element, project, nameSuggestionContext, editor)
+    PsiElementRenameHandler.rename(element,
+                                   project,
+                                   nameSuggestionContext,
+                                   editor)
   }
   def afterElementSubstitution(elementToRename: PsiElement,
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaLocalInplaceRenamer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaLocalInplaceRenamer.scala
index a3c7cae6d6c..20ea00afbb5 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaLocalInplaceRenamer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaLocalInplaceRenamer.scala
@@ -53,20 +53,18 @@ class ScalaLocalInplaceRenamer(elementToRename: PsiNamedElement,
       .getInstance(myProject)
       .getPsiFile(myEditor.getDocument)
     if (stringToSearch != null) {
-      TextOccurrencesUtil
-        .processUsagesInStringsAndComments(
-          elementToRename,
-          stringToSearch,
-          true,
-          new PairProcessor[PsiElement, TextRange] {
-            def process(psiElement: PsiElement,
-                        textRange: TextRange): Boolean = {
-              if (psiElement.getContainingFile == currentFile) {
-                stringUsages.add(Pair.create(psiElement, textRange))
-              }
-              true
+      TextOccurrencesUtil.processUsagesInStringsAndComments(
+        elementToRename,
+        stringToSearch,
+        true,
+        new PairProcessor[PsiElement, TextRange] {
+          def process(psiElement: PsiElement, textRange: TextRange): Boolean = {
+            if (psiElement.getContainingFile == currentFile) {
+              stringUsages.add(Pair.create(psiElement, textRange))
             }
-          })
+            true
+          }
+        })
     }
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaMemberInplaceRenamer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaMemberInplaceRenamer.scala
index 0ed5dcb0c78..12f374f3691 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaMemberInplaceRenamer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/rename/inplace/ScalaMemberInplaceRenamer.scala
@@ -80,15 +80,17 @@ class ScalaMemberInplaceRenamer(elementToRename: PsiNamedElement,
   override def beforeTemplateStart() {
     super.beforeTemplateStart()
-    val revertInfo = ScalaRefactoringUtil
-      .RevertInfo(editor.getDocument.getText, editor.getCaretModel.getOffset)
+    val revertInfo = ScalaRefactoringUtil.RevertInfo(
+      editor.getDocument.getText,
+      editor.getCaretModel.getOffset)
     editor.putUserData(ScalaMemberInplaceRenamer.REVERT_INFO, revertInfo)
     val file = PsiDocumentManager
       .getInstance(myProject)
       .getPsiFile(myEditor.getDocument)
-    val offset = TargetElementUtil
-      .adjustOffset(file, editor.getDocument, editor.getCaretModel.getOffset)
+    val offset = TargetElementUtil.adjustOffset(file,
+                                                editor.getDocument,
+                                                editor.getCaretModel.getOffset)
     val range = file.findElementAt(offset).getTextRange
     myCaretRangeMarker = myEditor.getDocument.createRangeMarker(range)
     myCaretRangeMarker.setGreedyToLeft(true)
@@ -188,8 +190,9 @@ class ScalaMemberInplaceRenamer(elementToRename: PsiNamedElement,
     val offset = editor.getCaretModel.getOffset
     val text = editor.getDocument.getText
     val aroundCaret =
-      text.substring(offset - 50, offset) + "" + text
-        .substring(offset, offset + 50)
+      text.substring(offset - 50, offset) + "" + text.substring(
+        offset,
+        offset + 50)
     val message = s"""Could not perform inplace rename:
                     |element to rename: $element ${element.getName}
                     |substituted: $subst
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/ui/ScalaCodeFragmentTableCellEditor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/ui/ScalaCodeFragmentTableCellEditor.scala
index 1468ad1b088..09596ac0ec1 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/ui/ScalaCodeFragmentTableCellEditor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/ui/ScalaCodeFragmentTableCellEditor.scala
@@ -17,8 +17,9 @@ class ScalaCodeFragmentTableCellEditor(project: Project)
   override def stopCellEditing: Boolean = {
     val editor: Editor = myEditorTextField.getEditor
     if (editor != null) {
-      JavaReferenceImporter
-        .autoImportReferenceAtCursor(editor, myCodeFragment, true)
+      JavaReferenceImporter.autoImportReferenceAtCursor(editor,
+                                                        myCodeFragment,
+                                                        true)
     }
     super.stopCellEditing
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/InplaceRenameHelper.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/InplaceRenameHelper.scala
index 4f1898e6516..2e57bffe131 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/InplaceRenameHelper.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/InplaceRenameHelper.scala
@@ -65,8 +65,11 @@ class InplaceRenameHelper(parent: PsiElement) {
       depNames += dependentName
       val (depElem, depRange) = dependentsWithRanges(index)
       if (depRange != null)
-        builder
-          .replaceElement(depElem, depRange, dependentName, newName, false)
+        builder.replaceElement(depElem,
+                               depRange,
+                               dependentName,
+                               newName,
+                               false)
       else builder.replaceElement(depElem, dependentName, newName, false)
     }
     primaries += primary
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaCompositeTypeValidator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaCompositeTypeValidator.scala
index 7ea1302579c..45663c5f454 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaCompositeTypeValidator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaCompositeTypeValidator.scala
@@ -106,10 +106,10 @@ class ScalaCompositeTypeValidator(conflictsReporter: ConflictsReporter,
   }
   private def messageForTypeAliasMember(name: String) =
-    ScalaBundle
-      .message("introduced.typealias.will.conflict.with.type.name", name)
+    ScalaBundle.message("introduced.typealias.will.conflict.with.type.name",
+                        name)
   private def messageForClassMember(name: String) =
-    ScalaBundle
-      .message("introduced.typealias.will.conflict.with.class.name", name)
+    ScalaBundle.message("introduced.typealias.will.conflict.with.class.name",
+                        name)
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaDirectoryService.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaDirectoryService.scala
index b83586a1b6b..ac044c47d1b 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaDirectoryService.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaDirectoryService.scala
@@ -46,8 +46,10 @@ object ScalaDirectoryService {
                           null,
                           properties).create
       else
-        FileTemplateUtil
-          .createFromTemplate(template, fileName, properties, dir)
+        FileTemplateUtil.createFromTemplate(template,
+                                            fileName,
+                                            properties,
+                                            dir)
     } catch {
       case e: IncorrectOperationException => throw e
       case e: Exception =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaRefactoringUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaRefactoringUtil.scala
index be70aaa8b25..6b80eabcd54 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaRefactoringUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaRefactoringUtil.scala
@@ -127,12 +127,13 @@ object ScalaRefactoringUtil {
       i = i - 1
     }
     if (hasNlToken)
-      e = ScalaPsiElementFactory
-        .createExpressionFromText(text.substring(0, i + 1), e.getManager)
+      e = ScalaPsiElementFactory.createExpressionFromText(
+        text.substring(0, i + 1),
+        e.getManager)
     e.getParent match {
       case x: ScMethodCall if x.args.exprs.nonEmpty =>
-        ScalaPsiElementFactory
-          .createExpressionFromText(e.getText + " _", e.getManager)
+        ScalaPsiElementFactory.createExpressionFromText(e.getText + " _",
+                                                        e.getManager)
       case _ => e
     }
   }
@@ -164,8 +165,7 @@ object ScalaRefactoringUtil {
   }
   def inTemplateParents(typeElement: ScTypeElement): Boolean = {
-    PsiTreeUtil
-      .getParentOfType(typeElement, classOf[ScTemplateParents]) != null
+    PsiTreeUtil.getParentOfType(typeElement, classOf[ScTemplateParents]) != null
   }
   def checkTypeElement(element: ScTypeElement): Option[ScTypeElement] = {
@@ -191,8 +191,9 @@ object ScalaRefactoringUtil {
   }
  def getOwner(typeElement: PsiElement) =
-    PsiTreeUtil
-      .getParentOfType(typeElement, classOf[ScTypeParametersOwner], true)
+    PsiTreeUtil.getParentOfType(typeElement,
+                                classOf[ScTypeParametersOwner],
+                                true)
   def getTypeParameterOwnerList(
       typeElement: ScTypeElement): Seq[ScTypeParametersOwner] = {
@@ -257,8 +258,9 @@ object ScalaRefactoringUtil {
     val rangeText = file.getText.substring(startOffset, endOffset)
     def selectedInfixExpr(): Option[(ScExpression, Array[ScType])] = {
-      val expr = ScalaPsiElementFactory
-        .createOptionExpressionFromText(rangeText, file.getManager)
+      val expr = ScalaPsiElementFactory.createOptionExpressionFromText(
+        rangeText,
+        file.getManager)
       expr match {
         case Some(expression: ScInfixExpr) =>
           val op1 = expression.operation
@@ -306,8 +308,10 @@ object ScalaRefactoringUtil {
                                                startOffset,
                                                classOf[ScLiteral],
                                                false)
-    val endLit = PsiTreeUtil
-      .findElementOfClassAtOffset(file, endOffset, classOf[ScLiteral], false)
+    val endLit = PsiTreeUtil.findElementOfClassAtOffset(file,
+                                                        endOffset,
+                                                        classOf[ScLiteral],
+                                                        false)
     if (lit == null || !lit.isString || lit != endLit) return None
     val prefix = lit match {
@@ -359,8 +363,8 @@ object ScalaRefactoringUtil {
   def expressionToIntroduce(expr: ScExpression): ScExpression = {
     def copyExpr = expr.copy.asInstanceOf[ScExpression]
     def liftMethod =
-      ScalaPsiElementFactory
-        .createExpressionFromText(expr.getText + " _", expr.getManager)
+      ScalaPsiElementFactory.createExpressionFromText(expr.getText + " _",
+                                                      expr.getManager)
     expr match {
       case ref: ScReferenceExpression =>
         ref.resolve() match {
@@ -648,8 +652,9 @@ object ScalaRefactoringUtil {
       JListCompatibility.addElement(model, element)
     }
     val list = JListCompatibility.createJListFromModel(model)
-    JListCompatibility
-      .setCellRenderer(list, new DefaultListCellRendererAdapter {
+    JListCompatibility.setCellRenderer(
+      list,
+      new DefaultListCellRendererAdapter {
        def getListCellRendererComponentAdapter(
            container: JListCompatibility.JListContainer,
            value: Object,
@@ -986,8 +991,9 @@ object ScalaRefactoringUtil {
     if (file.asInstanceOf[ScalaFile].isScriptFile()) file
     else {
       val elem = file.findElementAt(startOffset)
-      val result = ScalaPsiUtil
-        .getParentOfType(elem, classOf[ScExtendsBlock], classOf[PsiFile])
+      val result = ScalaPsiUtil.getParentOfType(elem,
+                                                classOf[ScExtendsBlock],
+                                                classOf[PsiFile])
       if (result == null) {
         for (child <- file.getChildren) {
           val textRange: TextRange = child.getTextRange
@@ -1032,8 +1038,11 @@ object ScalaRefactoringUtil {
                         project: Project,
                         editor: Editor,
                         refactoringName: String): Nothing = {
-    CommonRefactoringUtil
-      .showErrorHint(project, editor, text, refactoringName, null)
+    CommonRefactoringUtil.showErrorHint(project,
+                                        editor,
+                                        text,
+                                        refactoringName,
+                                        null)
     throw new IntroduceException
   }
@@ -1041,8 +1050,11 @@ object ScalaRefactoringUtil {
                     project: Project,
                     editor: Editor,
                     refactoringName: String) = {
-    CommonRefactoringUtil
-      .showErrorHint(project, editor, text, refactoringName, null)
+    CommonRefactoringUtil.showErrorHint(project,
+                                        editor,
+                                        text,
+                                        refactoringName,
+                                        null)
   }
   def checkFile(file: PsiFile,
@@ -1199,10 +1211,14 @@ object ScalaRefactoringUtil {
     documentManager.commitDocument(document)
     val newStart = start + shift
     val newEnd = newStart + newString.length
-    val newExpr = PsiTreeUtil
-      .findElementOfClassAtRange(file, newStart, newEnd, classOf[ScExpression])
-    val newPattern = PsiTreeUtil
-      .findElementOfClassAtOffset(file, newStart, classOf[ScPattern], true)
+    val newExpr = PsiTreeUtil.findElementOfClassAtRange(file,
+                                                        newStart,
+                                                        newEnd,
+                                                        classOf[ScExpression])
+    val newPattern = PsiTreeUtil.findElementOfClassAtOffset(file,
+                                                            newStart,
+                                                            classOf[ScPattern],
+                                                            true)
     Option(newExpr)
       .orElse(Option(newPattern))
       .map(elem => document.createRangeMarker(elem.getTextRange))
@@ -1249,8 +1265,9 @@ object ScalaRefactoringUtil {
       PsiTreeUtil
         .getParentOfType(elem, classOf[ScInterpolatedStringLiteral], false))
     val expr =
-      interpolated getOrElse PsiTreeUtil
-        .getParentOfType(elem, classOf[ScExpression], false)
+      interpolated getOrElse PsiTreeUtil.getParentOfType(elem,
+                                                         classOf[ScExpression],
+                                                         false)
     val nextPar = nextParent(expr, elem.getContainingFile)
     nextPar match {
       case prevExpr: ScExpression if !checkEnd(nextPar, expr) =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaTypeValidator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaTypeValidator.scala
index 65479d29f30..c6d7fb16a82 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaTypeValidator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaTypeValidator.scala
@@ -121,10 +121,10 @@ class ScalaTypeValidator(val conflictsReporter: ConflictsReporter,
   }
   private def messageForTypeAliasMember(name: String) =
-    ScalaBundle
-      .message("introduced.typealias.will.conflict.with.type.name", name)
+    ScalaBundle.message("introduced.typealias.will.conflict.with.type.name",
+                        name)
   private def messageForClassMember(name: String) =
-    ScalaBundle
-      .message("introduced.typealias.will.conflict.with.class.name", name)
+    ScalaBundle.message("introduced.typealias.will.conflict.with.class.name",
+                        name)
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaVariableValidator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaVariableValidator.scala
index 0bff5e54448..f90bb66703f 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaVariableValidator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaVariableValidator.scala
@@ -233,9 +233,10 @@ class ScalaVariableValidator(conflictsReporter: ConflictsReporter,
   private def messageForLocal(name: String) =
     ScalaBundle.message("introduced.variable.will.conflict.with.local", name)
   private def messageForParameter(name: String) =
-    ScalaBundle
-      .message("introduced.variable.will.conflict.with.parameter", name)
+    ScalaBundle.message("introduced.variable.will.conflict.with.parameter",
+                        name)
   private def messageForClassParameter(name: String) =
-    ScalaBundle
-      .message("introduced.variable.will.conflict.with.class.parameter", name)
+    ScalaBundle.message(
+      "introduced.variable.will.conflict.with.class.parameter",
+      name)
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/references/ScalaReferenceContributor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/references/ScalaReferenceContributor.scala
index e4785562595..64b24653f88 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/references/ScalaReferenceContributor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/references/ScalaReferenceContributor.scala
@@ -50,8 +50,9 @@ class InterpolatedStringReferenceProvider extends PsiReferenceProvider {
       case s: ScInterpolatedStringLiteral => Array.empty
       case l: ScLiteral
           if (l.isString || l.isMultiLineString) && l.getText.contains("$") =>
-          .createExpressionFromText("s" + l.getText, l.getContext)
+        val interpolated = ScalaPsiElementFactory.createExpressionFromText(
+          "s" + l.getText,
+          l.getContext)
         interpolated.getChildren.filter {
           case r: ScInterpolatedStringPartReference => false
           case ref: ScReferenceExpression => true
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolvableReferenceExpression.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolvableReferenceExpression.scala
index d36821ce439..b87961f7166 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolvableReferenceExpression.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolvableReferenceExpression.scala
@@ -232,10 +232,10 @@ trait ResolvableReferenceExpression extends ScReferenceExpression {
           val state: ResolveState = ResolveState
             .initial()
             .put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE)
-          processor.execute(
-            ScalaPsiElementFactory
-              .createParameterFromText(ref.refName + ": Any", getManager),
-            state)
+          processor.execute(ScalaPsiElementFactory.createParameterFromText(
+                              ref.refName + ": Any",
+                              getManager),
+                            state)
         }
       case ScalaResolveResult(named, subst)
           if call.applyOrUpdateElement.exists(_.isDynamic) &&
@@ -245,10 +245,10 @@ trait ResolvableReferenceExpression extends ScReferenceExpression {
           val state: ResolveState = ResolveState
             .initial()
             .put(CachesUtil.NAMED_PARAM_KEY, java.lang.Boolean.TRUE)
-          processor.execute(
-            ScalaPsiElementFactory
-              .createParameterFromText(ref.refName + ": Any", getManager),
-            state)
+          processor.execute(ScalaPsiElementFactory.createParameterFromText(
+                              ref.refName + ": Any",
+                              getManager),
+                            state)
         }
       case ScalaResolveResult(fun: ScFunction, subst: ScSubstitutor) =>
         if (!processor.isInstanceOf[CompletionProcessor]) {
@@ -391,8 +391,7 @@ trait ResolvableReferenceExpression extends ScReferenceExpression {
     candidate match {
       case ScalaResolveResult(fun: ScFunction, subst: ScSubstitutor) =>
         if (!baseProcessor.isInstanceOf[CompletionProcessor]) {
-          fun
-            .getParamByName(ref.refName, arguments.indexOf(args)) match {
+          fun.getParamByName(ref.refName, arguments.indexOf(args)) match {
             case Some(param) =>
               var state = ResolveState.initial
                 .put(ScSubstitutor.key, subst)
@@ -400,8 +399,8 @@ trait ResolvableReferenceExpression extends ScReferenceExpression {
                      java.lang.Boolean.TRUE)
               if (!ScalaPsiUtil.memberNamesEquals(param.name,
                                                   ref.refName)) {
-                state = state
-                  .put(ResolverEnv.nameKey, param.deprecatedName.get)
+                state = state.put(ResolverEnv.nameKey,
+                                  param.deprecatedName.get)
               }
               baseProcessor.execute(param, state)
             case None =>
@@ -614,8 +613,8 @@ trait ResolvableReferenceExpression extends ScReferenceExpression {
     }
     val argumentExpressions = callOption.map(_.argumentExpressions)
     val emptyStringExpression =
-      ScalaPsiElementFactory
-        .createExpressionFromText("\"\"", e.getManager)
+      ScalaPsiElementFactory.createExpressionFromText("\"\"",
+                                                      e.getManager)
     import org.jetbrains.plugins.scala.lang.resolve.ResolvableReferenceExpression._
     val name = callOption match {
       case Some(call) => getDynamicNameForMethodInvocation(call)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolvableStableCodeReferenceElement.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolvableStableCodeReferenceElement.scala
index 2fb6dd04e16..a6c087ad77b 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolvableStableCodeReferenceElement.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolvableStableCodeReferenceElement.scala
@@ -83,8 +83,9 @@ trait ResolvableStableCodeReferenceElement
       Array.empty,
       ModCount.getBlockModificationCount)
     def doResolve(incomplete: Boolean): Array[ResolveResult] =
-      ImportResolverNoMethods
-        .resolve(ResolvableStableCodeReferenceElement.this, incomplete)
+      ImportResolverNoMethods.resolve(
+        ResolvableStableCodeReferenceElement.this,
+        incomplete)
     resolveWithCompiled(incomplete, ImportResolverNoMethods, doResolve)
   }

@@ -95,8 +96,8 @@ trait ResolvableStableCodeReferenceElement
       Array.empty,
       ModCount.getBlockModificationCount)
     def doResolve(incomplete: Boolean): Array[ResolveResult] =
-      ImportResolverNoTypes
-        .resolve(ResolvableStableCodeReferenceElement.this, incomplete)
+      ImportResolverNoTypes.resolve(ResolvableStableCodeReferenceElement.this,
+                                    incomplete)
     resolveWithCompiled(incomplete, ImportResolverNoTypes, doResolve)
   }

@@ -194,8 +195,7 @@ trait ResolvableStableCodeReferenceElement
             s.subst(ScType.create(field.getType, getProject, getResolveScope)),
             this)
         case ScalaResolveResult(clazz: PsiClass, s) =>
-          processor
-            .processType(new ScDesignatorType(clazz, true), this) //static Java import
+          processor.processType(new ScDesignatorType(clazz, true), this) //static Java import
         case ScalaResolveResult(pack: ScPackage, s) =>
           pack.processDeclarations(
             processor,
@@ -219,8 +219,9 @@ trait ResolvableStableCodeReferenceElement
           PsiTreeUtil.getContextOfType(ref, true, classOf[ScImportStmt])

        if (importStmt != null) {
-          val importHolder = PsiTreeUtil
-            .getContextOfType(importStmt, true, classOf[ScImportsHolder])
+          val importHolder = PsiTreeUtil.getContextOfType(
+            importStmt,
+            true,
+            classOf[ScImportsHolder])
           if (importHolder != null) {
             importHolder.getImportStatements.takeWhile(_ != importStmt).foreach {
               case stmt: ScImportStmt =>
@@ -344,8 +345,7 @@ trait ResolvableStableCodeReferenceElement
             true // scala classes are available from default package
           // Other classes from default package are available only for top-level Scala statements
           case _ =>
-            PsiTreeUtil
-              .getContextOfType(this, true, classOf[ScPackaging]) == null
+            PsiTreeUtil.getContextOfType(this, true, classOf[ScPackaging]) == null
         }
       case _ => true
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolveUtils.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolveUtils.scala
index 22e4c237a12..58310c334e7 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolveUtils.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ResolveUtils.scala
@@ -93,8 +93,9 @@ object ResolveUtils {
             (kinds contains OBJECT) && isStaticCorrect(c)
           }
         case patt: ScBindingPattern =>
-          val parent = ScalaPsiUtil
-            .getParentOfType(patt, classOf[ScVariable], classOf[ScValue])
+          val parent = ScalaPsiUtil.getParentOfType(patt,
+                                                    classOf[ScVariable],
+                                                    classOf[ScValue])
           parent match {
             case x: ScVariable => kinds contains VAR
             case _ => kinds contains VAL
@@ -281,8 +282,9 @@ object ResolveUtils {
                     true,
                     classOf[ScTemplateDefinition])
                 if (enclosing == null) return true
-                return PsiTreeUtil
-                  .isContextAncestor(enclosing, place, false)
+                return PsiTreeUtil.isContextAncestor(enclosing,
+                                                     place,
+                                                     false)
               case Some(t: ScThisReference) =>
                 val enclosing = PsiTreeUtil.getContextOfType(
                   scMember,
                   true,
                   classOf[ScTemplateDefinition])
                 t.refTemplate match {
                   case Some(t) => return t == enclosing
                   case _ =>
-                    return PsiTreeUtil
-                      .isContextAncestor(enclosing, place, false)
+                    return PsiTreeUtil.isContextAncestor(enclosing,
+                                                         place,
+                                                         false)
                 }
               case Some(ref: ScReferenceElement) =>
                 val enclosing = PsiTreeUtil.getContextOfType(
@@ -313,8 +316,9 @@ object ResolveUtils {
                     true,
                     classOf[ScTemplateDefinition])
                 if (enclosing == null) return true
-                return PsiTreeUtil
-                  .isContextAncestor(enclosing, place, false)
+                return PsiTreeUtil.isContextAncestor(enclosing,
+                                                     place,
+                                                     false)
             }
         }
         val ref = am.getReference
@@ -568,8 +572,8 @@ object ResolveUtils {
             TypeDefinitionMembers.processSuperDeclarations(
               c,
               processor,
-              ResolveState.initial
-                .put(ScSubstitutor.key, ScSubstitutor.empty),
+              ResolveState.initial.put(ScSubstitutor.key,
+                                       ScSubstitutor.empty),
               null,
               place)
           case None =>
@@ -733,8 +737,10 @@ object ResolveUtils {
     //process subpackages
     pack match {
       case s: ScPackageImpl =>
-        s.pack
-          .processDeclarations(processor, state, lastParent, place)
+        s.pack.processDeclarations(processor,
+                                   state,
+                                   lastParent,
+                                   place)
       case _ =>
         pack.processDeclarations(processor, state, lastParent, place)
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ScalaResolveResult.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ScalaResolveResult.scala
index 78157cdd55c..8b6335db73d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ScalaResolveResult.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/ScalaResolveResult.scala
@@ -234,8 +234,9 @@ class ScalaResolveResult(
       case clazz: PsiClass => return getClazzPrecedence(clazz)
       case memb @ (_: ScBindingPattern | _: PsiMember) =>
-        val clazzStub = ScalaPsiUtil
-          .getContextOfType(getActualElement, false, classOf[PsiClass])
+        val clazzStub = ScalaPsiUtil.getContextOfType(getActualElement,
+                                                      false,
+                                                      classOf[PsiClass])
         val clazz: PsiClass = clazzStub match {
           case clazz: PsiClass => clazz
           case _ => null
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/BaseProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/BaseProcessor.scala
index 1d65f1390ca..54c92c14b73 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/BaseProcessor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/BaseProcessor.scala
@@ -374,8 +374,11 @@ abstract class BaseProcessor(val kinds: Set[ResolveTargets.Value])
         }
         true
       case comp @ ScCompoundType(components, signaturesMap, typesMap) =>
-        TypeDefinitionMembers
-          .processDeclarations(comp, this, state, null, place)
+        TypeDefinitionMembers.processDeclarations(comp,
+                                                  this,
+                                                  state,
+                                                  null,
+                                                  place)
       case ex: ScExistentialType =>
         processType(ex.skolem,
                     place,
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/ExpandedExtractorResolveProcessor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/ExpandedExtractorResolveProcessor.scala
index 9e1a5cb08d2..93a9780bd75 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/ExpandedExtractorResolveProcessor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/ExpandedExtractorResolveProcessor.scala
@@ -68,8 +68,9 @@ class ExpandedExtractorResolveProcessor(ref: ScReferenceElement,
         if (candidatesSet.isEmpty && levelSet.isEmpty) {
           buffer.clear()
           seq = true
-          proc
-            .processType(parentSubst.subst(typez), ref, ResolveState.initial)
+          proc.processType(parentSubst.subst(typez),
+                           ref,
+                           ResolveState.initial)
           addResults(buffer.toSeq)
         }
       }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/MostSpecificUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/MostSpecificUtil.scala
index 6050473d5eb..f41f862a49f 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/MostSpecificUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/resolve/processor/MostSpecificUtil.scala
@@ -460,8 +460,9 @@ case class MostSpecificUtil(elem: PsiElement, length: Int) {
       case f: ScFunction => f.polymorphicType()
       case p: ScPrimaryConstructor => p.polymorphicType
       case m: PsiMethod =>
-        ResolveUtils
-          .javaPolymorphicType(m, ScSubstitutor.empty, elem.getResolveScope)
+        ResolveUtils.javaPolymorphicType(m,
+                                         ScSubstitutor.empty,
+                                         elem.getResolveScope)
       case refPatt: ScReferencePattern =>
         refPatt.getParent /*id list*/ .getParent match {
           case pd: ScPatternDefinition
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/scaladoc/parser/parsing/MyScaladocParsing.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/scaladoc/parser/parsing/MyScaladocParsing.scala
index 4f798bf045a..4cf6e9ef7c2 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/scaladoc/parser/parsing/MyScaladocParsing.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/scaladoc/parser/parsing/MyScaladocParsing.scala
@@ -112,8 +112,9 @@ class MyScaladocParsing(private val psiBuilder: PsiBuilder)
     if (tokenType == DOC_LINK_TAG &&
         builder.getTokenType == ScalaTokenTypes.tIDENTIFIER &&
         !isEndOfComment) {
-      StableId
-        .parse(new ScalaPsiBuilderImpl(builder), true, DOC_CODE_LINK_VALUE)
+      StableId.parse(new ScalaPsiBuilderImpl(builder),
+                     true,
+                     DOC_CODE_LINK_VALUE)
     }

     while (!isEndOfComment) {
@@ -257,8 +258,9 @@ class MyScaladocParsing(private val psiBuilder: PsiBuilder)
         if (!isEndOfComment) {
           builder.advanceLexer()
         }
-        StableId
-          .parse(new ScalaPsiBuilderImpl(builder), true, DOC_TAG_VALUE_TOKEN)
+        StableId.parse(new ScalaPsiBuilderImpl(builder),
+                       true,
+                       DOC_TAG_VALUE_TOKEN)
       case PARAM_TAG | TYPE_PARAM_TAG | DEFINE_TAG =>
         if (!ParserUtils.lookAhead(builder,
                                    builder.getTokenType,
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/scaladoc/psi/impl/ScDocResolvableCodeReferenceImpl.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/scaladoc/psi/impl/ScDocResolvableCodeReferenceImpl.scala
index 4c12d80a1cc..48026c2672c 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/scaladoc/psi/impl/ScDocResolvableCodeReferenceImpl.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/scaladoc/psi/impl/ScDocResolvableCodeReferenceImpl.scala
@@ -60,8 +60,8 @@ class ScDocResolvableCodeReferenceImpl(node: ASTNode)
       clazz: TypeToImport) =
     if (is2_10plus) super.createReplacingElementWithClassName(true, clazz)
     else
-      ScalaPsiElementFactory
-        .createDocLinkValue(clazz.qualifiedName, clazz.element.getManager)
+      ScalaPsiElementFactory.createDocLinkValue(clazz.qualifiedName,
+                                                clazz.element.getManager)

   override protected def processQualifier(ref: ScStableCodeReferenceElement,
                                           processor: BaseProcessor) {
@@ -71,8 +71,10 @@ class ScDocResolvableCodeReferenceImpl(node: ASTNode)
       case None =>
         val defaultPackage = ScPackageImpl(
           JavaPsiFacade.getInstance(getProject).findPackage(""))
-        defaultPackage
-          .processDeclarations(processor, ResolveState.initial(), null, ref)
+        defaultPackage.processDeclarations(processor,
+                                           ResolveState.initial(),
+                                           null,
+                                           ref)
       case Some(q: ScDocResolvableCodeReference) =>
         q.multiResolve(true)
          .foreach(processQualifierResolveResult(_, processor, ref))
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/structureView/elements/impl/ScalaFileStructureViewElement.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/structureView/elements/impl/ScalaFileStructureViewElement.scala
index 290b1fca8f5..793bc4904d0 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/structureView/elements/impl/ScalaFileStructureViewElement.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/structureView/elements/impl/ScalaFileStructureViewElement.scala
@@ -82,8 +82,8 @@ class ScalaFileStructureViewElement(file: ScalaFile,
       val buffer = new mutable.StringBuilder
       buffer.append(console.getHistory)
       buffer.append(file.getText)
-      val newFile = ScalaPsiElementFactory
-        .createScalaFile(buffer.toString(), file.getManager)
+      val newFile = ScalaPsiElementFactory.createScalaFile(buffer.toString(),
+                                                           file.getManager)
       newFile
     } else {
       file
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/surroundWith/surrounders/scaladoc/ScalaDocWithSyntaxSurrounder.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/surroundWith/surrounders/scaladoc/ScalaDocWithSyntaxSurrounder.scala
index 22d10c5409d..e03465557be 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/surroundWith/surrounders/scaladoc/ScalaDocWithSyntaxSurrounder.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/lang/surroundWith/surrounders/scaladoc/ScalaDocWithSyntaxSurrounder.scala
@@ -26,8 +26,7 @@ trait ScalaDocWithSyntaxSurrounder extends Surrounder {
     def getNewExprText(expr: String): String =
       expr.substring(0, startOffset - offset) + getSyntaxTag +
-        expr
-          .substring(startOffset - offset, endOffset - offset) + getSyntaxTag +
+        expr.substring(startOffset - offset, endOffset - offset) + getSyntaxTag +
         expr.substring(endOffset - offset)

     val surroundedText = new StringBuilder()
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaGenerationInfo.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaGenerationInfo.scala
index 6e17c0d4d97..8cd4bdc58fc 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaGenerationInfo.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaGenerationInfo.scala
@@ -223,8 +223,8 @@ object ScalaGenerationInfo {
     properties.setProperty(FileTemplate.ATTRIBUTE_RETURN_TYPE,
                            ScType.presentableText(returnType))
-    properties
-      .setProperty(FileTemplate.ATTRIBUTE_DEFAULT_RETURN_VALUE, standardValue)
+    properties.setProperty(FileTemplate.ATTRIBUTE_DEFAULT_RETURN_VALUE,
+                           standardValue)
     properties.setProperty(FileTemplate.ATTRIBUTE_CALL_SUPER,
                            callSuperText(td, method))
     properties.setProperty(
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaImplementMethodsHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaImplementMethodsHandler.scala
index ecff6802d89..5ee5958b6de 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaImplementMethodsHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaImplementMethodsHandler.scala
@@ -14,8 +14,10 @@ class ScalaImplementMethodsHandler extends LanguageCodeInsightActionHandler {
   def startInWriteAction: Boolean = false

   def invoke(project: Project, editor: Editor, file: PsiFile) {
-    ScalaOIUtil
-      .invokeOverrideImplement(project, editor, file, isImplement = true)
+    ScalaOIUtil.invokeOverrideImplement(project,
+                                        editor,
+                                        file,
+                                        isImplement = true)
   }

   def isValidFor(editor: Editor, file: PsiFile): Boolean =
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaMethodImplementor.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaMethodImplementor.scala
index fa03453259f..f1fbd13989d 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaMethodImplementor.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaMethodImplementor.scala
@@ -85,8 +85,9 @@ private class ScalaPsiMethodGenerationInfo(method: PsiMethod,
         val sign = new PhysicalSignature(method, ScSubstitutor.empty)
         val methodMember = new ScMethodMember(sign, isOverride = false)
-        member = ScalaGenerationInfo
-          .insertMethod(methodMember, td, findAnchor(td, baseMethod))
+        member = ScalaGenerationInfo.insertMethod(methodMember,
+                                                  td,
+                                                  findAnchor(td, baseMethod))
       case _ => super.insert(aClass, anchor, before)
     }
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaOIUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaOIUtil.scala
index 4bf03907646..5fb6a9038b1 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaOIUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaOIUtil.scala
@@ -125,25 +125,24 @@ object ScalaOIUtil {
                        isImplement: Boolean,
                        clazz: ScTemplateDefinition,
                        editor: Editor) {
-    ScalaUtils
-      .runWriteAction(
-        new Runnable {
-          def run() {
-            import scala.collection.JavaConversions._
-            val sortedMembers =
-              ScalaMemberChooser.sorted(selectedMembers, clazz)
-            val genInfos = sortedMembers.map(new ScalaGenerationInfo(_))
-            val anchor = getAnchor(editor.getCaretModel.getOffset, clazz)
-            val inserted =
-              GenerateMembersUtil.insertMembersBeforeAnchor(clazz,
-                                                            anchor.orNull,
-                                                            genInfos.reverse)
-            inserted.headOption.foreach(
-              _.positionCaret(editor, toEditMethodBody = true))
-          }
-        },
-        clazz.getProject,
-        if (isImplement) "Implement method" else "Override method")
+    ScalaUtils.runWriteAction(
+      new Runnable {
+        def run() {
+          import scala.collection.JavaConversions._
+          val sortedMembers =
+            ScalaMemberChooser.sorted(selectedMembers, clazz)
+          val genInfos = sortedMembers.map(new ScalaGenerationInfo(_))
+          val anchor = getAnchor(editor.getCaretModel.getOffset, clazz)
+          val inserted =
+            GenerateMembersUtil.insertMembersBeforeAnchor(clazz,
+                                                          anchor.orNull,
+                                                          genInfos.reverse)
+          inserted.headOption.foreach(
+            _.positionCaret(editor, toEditMethodBody = true))
+        }
+      },
+      clazz.getProject,
+      if (isImplement) "Implement method" else "Override method")
   }

   def getMembersToImplement(
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaOverrideMethodsHandler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaOverrideMethodsHandler.scala
index 85497e28300..ae2a2c8ca79 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaOverrideMethodsHandler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/overrideImplement/ScalaOverrideMethodsHandler.scala
@@ -14,8 +14,10 @@ class ScalaOverrideMethodsHandler extends LanguageCodeInsightActionHandler {
   def startInWriteAction: Boolean = false

   def invoke(project: Project, editor: Editor, file: PsiFile) {
-    ScalaOIUtil
-      .invokeOverrideImplement(project, editor, file, isImplement = false)
+    ScalaOIUtil.invokeOverrideImplement(project,
+                                        editor,
+                                        file,
+                                        isImplement = false)
   }

   def isValidFor(editor: Editor, file: PsiFile): Boolean =
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/notification/SetupScalaSdkNotificationProvider.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/notification/SetupScalaSdkNotificationProvider.scala
index 2152f79dc2c..73dfeb8ce89 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/notification/SetupScalaSdkNotificationProvider.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/notification/SetupScalaSdkNotificationProvider.scala
@@ -71,8 +71,9 @@ object SetupScalaSdkNotificationProvider {
   private def setupSdk(parent: JComponent, project: Project, file: PsiFile) {
     Option(ModuleUtilCore.findModuleForPsiElement(file)).foreach { module =>
-      val dialog = AddSupportForSingleFrameworkDialog
-        .createDialog(module, new ScalaSupportProvider())
+      val dialog = AddSupportForSingleFrameworkDialog.createDialog(
+        module,
+        new ScalaSupportProvider())
       dialog.showAndGet()
     }
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/notification/source/ScalaEditorFileSwapper.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/notification/source/ScalaEditorFileSwapper.scala
index d33faad087e..e9926da58a4 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/notification/source/ScalaEditorFileSwapper.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/notification/source/ScalaEditorFileSwapper.scala
@@ -52,9 +52,9 @@ class ScalaEditorFileSwapper extends EditorFileSwapper {
   def getFileToSwapTo(project: Project,
                       editorWithProviderComposite: EditorWithProviderComposite)
     : Pair[VirtualFile, Integer] = {
-    Pair.create(
-      ScalaEditorFileSwapper
-        .findSourceFile(project, editorWithProviderComposite.getFile),
-      null)
+    Pair.create(ScalaEditorFileSwapper.findSourceFile(
+                  project,
+                  editorWithProviderComposite.getFile),
+                null)
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/settings/ScalaCompilerConfiguration.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/settings/ScalaCompilerConfiguration.scala
index e2e6b35226d..8bfffd8b515 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/settings/ScalaCompilerConfiguration.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/project/settings/ScalaCompilerConfiguration.scala
@@ -76,8 +76,8 @@ class ScalaCompilerConfiguration(project: Project)
     if (incrementalityType != IncrementalityType.IDEA) {
       val incrementalityTypeElement = new Element("option")
       incrementalityTypeElement.setAttribute("name", "incrementalityType")
-      incrementalityTypeElement
-        .setAttribute("value", incrementalityType.toString)
+      incrementalityTypeElement.setAttribute("value",
+                                             incrementalityType.toString)
       configurationElement.addContent(incrementalityTypeElement)
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/ScalaI18nUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/ScalaI18nUtil.scala
index b949e231d51..e4e4fddaa6b 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/ScalaI18nUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/ScalaI18nUtil.scala
@@ -87,8 +87,8 @@ object ScalaI18nUtil {
     if (property == NULL) return false
     if (property != null) return true
     val annotationParams = new mutable.HashMap[String, AnyRef]
-    annotationParams
-      .put(AnnotationUtil.PROPERTY_KEY_RESOURCE_BUNDLE_PARAMETER, null)
+    annotationParams.put(AnnotationUtil.PROPERTY_KEY_RESOURCE_BUNDLE_PARAMETER,
+                         null)
     val isI18n: Boolean = mustBePropertyKey(project, expr, annotationParams)
     if (!isI18n) {
       expr.putUserData(CACHE, NULL)
@@ -462,8 +462,9 @@ object ScalaI18nUtil {
                             @NotNull key: String,
                             @NotNull outResourceBundle: Ref[String]): Boolean = {
     val annotationAttributeValues = new mutable.HashMap[String, AnyRef]
-    annotationAttributeValues
-      .put(AnnotationUtil.PROPERTY_KEY_RESOURCE_BUNDLE_PARAMETER, null)
+    annotationAttributeValues.put(
+      AnnotationUtil.PROPERTY_KEY_RESOURCE_BUNDLE_PARAMETER,
+      null)
     if (mustBePropertyKey(project, expression, annotationAttributeValues)) {
       annotationAttributeValues get AnnotationUtil.PROPERTY_KEY_RESOURCE_BUNDLE_PARAMETER exists {
         case bundleName: PsiElement =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/ScalaInvalidPropertyKeyInspection.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/ScalaInvalidPropertyKeyInspection.scala
index 2e44348ff5c..9445b3d49d9 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/ScalaInvalidPropertyKeyInspection.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/ScalaInvalidPropertyKeyInspection.scala
@@ -69,8 +69,9 @@ class ScalaInvalidPropertyKeyInspection extends LocalInspectionTool {
                    @NotNull manager: InspectionManager,
                    @NotNull problems: util.List[ProblemDescriptor],
                    onTheFly: Boolean) {
-    val description: String = CodeInsightBundle
-      .message("inspection.unresolved.property.key.reference.message", key)
+    val description: String = CodeInsightBundle.message(
+      "inspection.unresolved.property.key.reference.message",
+      key)
     problems.add(
       manager.createProblemDescriptor(
         expression,
@@ -161,8 +162,9 @@ class ScalaInvalidPropertyKeyInspection extends LocalInspectionTool {
           case expressions: ScArgumentExprList
               if expression.getParent.getParent.isInstanceOf[ScMethodCall] =>
             val annotationParams = new mutable.HashMap[String, AnyRef]
-            annotationParams
-              .put(AnnotationUtil.PROPERTY_KEY_RESOURCE_BUNDLE_PARAMETER, null)
+            annotationParams.put(
+              AnnotationUtil.PROPERTY_KEY_RESOURCE_BUNDLE_PARAMETER,
+              null)
             if (!ScalaI18nUtil.mustBePropertyKey(myManager.getProject,
                                                  expression,
                                                  annotationParams))
               return
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/folding/ScalaPropertyFoldingBuilder.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/folding/ScalaPropertyFoldingBuilder.scala
index db682439f51..cfac322f16a 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/folding/ScalaPropertyFoldingBuilder.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/scalai18n/codeInspection/i18n/folding/ScalaPropertyFoldingBuilder.scala
@@ -54,8 +54,8 @@ class ScalaPropertyFoldingBuilder extends FoldingBuilderEx {
       case literal: ScLiteral =>
         return ScalaI18nUtil.getI18nMessage(element.getProject, literal)
       case methodCall: ScMethodCall =>
-        return ScalaI18nUtil
-          .formatMethodCallExpression(element.getProject, methodCall)
+        return ScalaI18nUtil.formatMethodCallExpression(element.getProject,
+                                                        methodCall)
       case _ =>
     }
     element.getText
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/script/ScalaScriptConfugurationProducer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/script/ScalaScriptConfugurationProducer.scala
index 7588fab5599..03245391d23 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/script/ScalaScriptConfugurationProducer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/script/ScalaScriptConfugurationProducer.scala
@@ -59,8 +59,8 @@ class ScalaScriptConfugurationProducer extends {
       .createRunConfiguration(scalaFile.name, confFactory)
     val conf: ScalaScriptRunConfiguration =
       settings.getConfiguration.asInstanceOf[ScalaScriptRunConfiguration]
-    val module = ModuleUtilCore
-      .findModuleForFile(scalaFile.getVirtualFile, scalaFile.getProject)
+    val module = ModuleUtilCore.findModuleForFile(scalaFile.getVirtualFile,
+                                                  scalaFile.getProject)
     if (module == null || !module.hasScala) return null
     conf.setModule(module)
     conf.setScriptPath(scalaFile.getVirtualFile.getPath)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/script/ScalaScriptRunConfiguration.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/script/ScalaScriptRunConfiguration.scala
index 59025983ae3..aa914519862 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/script/ScalaScriptRunConfiguration.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/script/ScalaScriptRunConfiguration.scala
@@ -112,8 +112,8 @@ class ScalaScriptRunConfiguration(
     params.setMainClass(MAIN_CLASS)
     params.getProgramParametersList.add("-nocompdaemon") //todo: seems to be a bug in scala compiler. Ticket #1498
     params.getProgramParametersList.add("-classpath")
-    params
-      .configureByModule(module, JavaParameters.JDK_AND_CLASSES_AND_TESTS)
+    params.configureByModule(module,
+                             JavaParameters.JDK_AND_CLASSES_AND_TESTS)
     params.getProgramParametersList.add(params.getClassPath.getPathsString)
     params.getClassPath.addAllFiles(
       module.scalaSdk.map(_.compilerClasspath).getOrElse(Seq.empty))
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/settings/ScalaProjectSettingsUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/settings/ScalaProjectSettingsUtil.scala
index 1aebad46e82..15919ea8885 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/settings/ScalaProjectSettingsUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/settings/ScalaProjectSettingsUtil.scala
@@ -59,8 +59,8 @@ object ScalaProjectSettingsUtil {
   def getPackageValidator: InputValidator = new InputValidator {
     def checkInput(inputString: String): Boolean = {
-      ScalaProjectSettingsUtil
-        .isValidPackage(inputString, checkPlaceholder = false)
+      ScalaProjectSettingsUtil.isValidPackage(inputString,
+                                              checkPlaceholder = false)
     }

     def canClose(inputString: String): Boolean = {
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/spellchecker/ScLiteralExpressionTokenizer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/spellchecker/ScLiteralExpressionTokenizer.scala
index 3bf4bd436f5..9b59d317ddd 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/spellchecker/ScLiteralExpressionTokenizer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/spellchecker/ScLiteralExpressionTokenizer.scala
@@ -26,10 +26,14 @@ class ScLiteralExpressionTokenizer extends Tokenizer[ScLiteral] {
                     consumer: TokenConsumer) {
     val unEscapedText: StringBuilder = new StringBuilder
     val offsets: Array[Int] = new Array[Int](text.length + 1)
-    PsiLiteralExpressionImpl
-      .parseStringCharacters(text, unEscapedText, offsets)
-    EscapeSequenceTokenizer
-      .processTextWithOffsets(element, consumer, unEscapedText, offsets, 1)
+    PsiLiteralExpressionImpl.parseStringCharacters(text,
+                                                   unEscapedText,
+                                                   offsets)
+    EscapeSequenceTokenizer.processTextWithOffsets(element,
+                                                   consumer,
+                                                   unEscapedText,
+                                                   offsets,
+                                                   1)
   }

   def tokenize(element: ScLiteral, consumer: TokenConsumer) {
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/ScalaTestGenerator.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/ScalaTestGenerator.scala
index e95165c81bf..396ce6580ff 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/ScalaTestGenerator.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/ScalaTestGenerator.scala
@@ -103,8 +103,9 @@ class ScalaTestGenerator extends TestGenerator {
     val extendsBlock = typeDefinition.extendsBlock
     def addExtendsRef(refName: String) = {
       val (extendsToken, classParents) =
-        ScalaPsiElementFactory
-          .createClassTemplateParents(refName, typeDefinition.getManager)
+        ScalaPsiElementFactory.createClassTemplateParents(
+          refName,
+          typeDefinition.getManager)
       val extendsAdded =
         extendsBlock.addBefore(extendsToken, extendsBlock.getFirstChild)
       extendsBlock.addAfter(classParents, extendsAdded)
@@ -141,8 +142,9 @@ class ScalaTestGenerator extends TestGenerator {
                                                          generateAfter,
                                                          typeDef,
                                                          editor.getProject)
-      ScalaTestGenerator
-        .addScalaTestFeatureSpecMethods(methodsList, psiManager, body)
+      ScalaTestGenerator.addScalaTestFeatureSpecMethods(methodsList,
+                                                        psiManager,
+                                                        body)
     } else if (isInheritor(typeDef, "org.scalatest.FlatSpecLike") ||
                isInheritor(typeDef, "org.scalatest.fixture.FlatSpecLike")) {
       ScalaTestGenerator.generateScalaTestBeforeAndAfter(generateBefore,
@@ -160,8 +162,9 @@ class ScalaTestGenerator extends TestGenerator {
                                                          generateAfter,
                                                          typeDef,
                                                          editor.getProject)
-      ScalaTestGenerator
-        .addScalaTestFreeSpecMethods(methodsList, psiManager, body)
+      ScalaTestGenerator.addScalaTestFreeSpecMethods(methodsList,
+                                                     psiManager,
+                                                     body)
     } else if (isInheritor(typeDef, "org.scalatest.FunSpecLike") ||
                isInheritor(typeDef, "org.scalatest.fixture.FunSpecLike")) {
       ScalaTestGenerator.generateScalaTestBeforeAndAfter(generateBefore,
@@ -178,16 +181,18 @@ class ScalaTestGenerator extends TestGenerator {
                                                          generateAfter,
                                                          typeDef,
                                                          editor.getProject)
-      ScalaTestGenerator
-        .addScalaTestFunSuiteMethods(methodsList, psiManager, body)
+      ScalaTestGenerator.addScalaTestFunSuiteMethods(methodsList,
+                                                     psiManager,
+                                                     body)
     } else if (isInheritor(typeDef, "org.scalatest.PropSpecLike") ||
               isInheritor(typeDef, "org.scalatest.fixture.PropSpecLike")) {
       ScalaTestGenerator.generateScalaTestBeforeAndAfter(generateBefore,
                                                          generateAfter,
                                                          typeDef,
                                                          editor.getProject)
-      ScalaTestGenerator
-        .addScalaTestPropSpecMethods(methodsList, psiManager, body)
+      ScalaTestGenerator.addScalaTestPropSpecMethods(methodsList,
+                                                     psiManager,
+                                                     body)
     } else if (isInheritor(typeDef, "org.scalatest.WordSpecLike") ||
               isInheritor(typeDef, "org.scalatest.fixture.WordSpecLike")) {
       ScalaTestGenerator.generateScalaTestBeforeAndAfter(generateBefore,
@@ -502,8 +507,9 @@ object ScalaTestGenerator {
                    GlobalSearchScope.allScope(project),
                    ScalaPsiManager.ClassCategory.TYPE)) match {
       case Some(groupsTypeDef) =>
-        ExtractSuperUtil
-          .addExtendsTo(typeDef, groupsTypeDef.asInstanceOf[ScTypeDefinition])
+        ExtractSuperUtil.addExtendsTo(
+          typeDef,
+          groupsTypeDef.asInstanceOf[ScTypeDefinition])
         val testNames = methods.map("test" + _.getMember.getName.capitalize)
         val closingBrace = templateBody.getLastChild
        val normalIndentString = FormatterUtil.getNormalIndentString(project)
@@ -521,14 +527,15 @@ object ScalaTestGenerator {
                                 psiManager),
                               closingBrace)
         if (methods.nonEmpty) {
-          templateBody
-            .addBefore(ScalaPsiElementFactory.createExpressionFromText(
-                         testNames
-                           .map("eg := ok //" + _)
-                           .fold("\"" + className + "\" - new group {")(
-                             _ + "\n" + _) + "\n}",
-                         psiManager),
-                       closingBrace)
+          templateBody.addBefore(ScalaPsiElementFactory
+                                   .createExpressionFromText(
+                                     testNames
+                                       .map("eg := ok //" + _)
+                                       .fold(
+                                         "\"" + className + "\" - new group {")(
+                                         _ + "\n" + _) + "\n}",
+                                     psiManager),
+                                 closingBrace)
         }
       case _ =>
     }
@@ -557,8 +564,9 @@ object ScalaTestGenerator {
                     project: Project) {
     val normalIndentString = FormatterUtil.getNormalIndentString(project)
     templateBody.addBefore(
-      ScalaPsiElementFactory
-        .createElement("val tests = TestSuite{}", psiManager, Def.parse(_)),
+      ScalaPsiElementFactory.createElement("val tests = TestSuite{}",
+                                           psiManager,
+                                           Def.parse(_)),
       templateBody.getLastChild)
     if (methods.nonEmpty) {
       templateBody.addBefore(
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestConfigurationProducer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestConfigurationProducer.scala
index f077df012d4..709afe75e4e 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestConfigurationProducer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestConfigurationProducer.scala
@@ -25,8 +25,9 @@ trait AbstractTestConfigurationProducer {
     : Option[(PsiElement, RunnerAndConfigurationSettings)] = {
     if (context.getModule == null) return null
     val scope: GlobalSearchScope =
-      GlobalSearchScope
-        .moduleWithDependenciesAndLibrariesScope(context.getModule, true)
+      GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(
+        context.getModule,
+        true)
     if (suitePaths.forall(
           suitePath =>
             ScalaPsiManager
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestRerunFailedTestsAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestRerunFailedTestsAction.scala
index 2a0453a7fe6..aca7e62da10 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestRerunFailedTestsAction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestRerunFailedTestsAction.scala
@@ -42,8 +42,7 @@ class AbstractTestRerunFailedTestsAction(
   def getModules: Array[Module] = configuration.getModules

   def getTestName(failed: AbstractTestProxy): String = {
-    failed
-      .getLocation(getProject, GlobalSearchScope.allScope(getProject)) match {
+    failed.getLocation(getProject, GlobalSearchScope.allScope(getProject)) match {
       case PsiLocationWithName(_, _, testName) => testName
       case _ => failed.getName
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestRunConfiguration.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestRunConfiguration.scala
index f4c1d6442bf..3b5d7e33f01 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestRunConfiguration.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/AbstractTestRunConfiguration.scala
@@ -443,8 +443,10 @@ abstract class AbstractTestRunConfiguration(
     //expand environment variables in vmParams
     for (entry <- params.getEnv.entrySet) {
-      vmParams = StringUtil
-        .replace(vmParams, "$" + entry.getKey + "$", entry.getValue, false)
+      vmParams = StringUtil.replace(vmParams,
+                                    "$" + entry.getKey + "$",
+                                    entry.getValue,
+                                    false)
     }

     params.getVMParametersList.addParametersString(vmParams)
@@ -602,8 +604,10 @@ abstract class AbstractTestRunConfiguration(
     consoleProperties.setIdBasedTestTree(true)

     // console view
-    val consoleView = SMTestRunnerConnectionUtil
-      .createAndAttachConsole("Scala", processHandler, consoleProperties)
+    val consoleView = SMTestRunnerConnectionUtil.createAndAttachConsole(
+      "Scala",
+      processHandler,
+      consoleProperties)

     val res = new DefaultExecutionResult(
       consoleView,
@@ -646,8 +650,9 @@ abstract class AbstractTestRunConfiguration(
       "testKind",
       if (testKind != null) testKind.toString
       else TestKind.CLASS.toString)
-    JDOMExternalizer
-      .write(element, "showProgressMessages", showProgressMessages.toString)
+    JDOMExternalizer.write(element,
+                           "showProgressMessages",
+                           showProgressMessages.toString)
     JDOMExternalizer.writeMap(element, envs, "envs", "envVar")
     PathMacroManager.getInstance(getProject).collapsePathsRecursively(element)
   }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/scalatest/ScalaTestConfigurationProducer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/scalatest/ScalaTestConfigurationProducer.scala
index 83fc3a57e86..7a7acd63f75 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/scalatest/ScalaTestConfigurationProducer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/scalatest/ScalaTestConfigurationProducer.scala
@@ -117,8 +117,8 @@ class ScalaTestConfigurationProducer extends {
     if (element.isInstanceOf[PsiPackage] || element.isInstanceOf[PsiDirectory]) {
       if (!configuration.isInstanceOf[ScalaTestRunConfiguration]) return false
-      return TestConfigurationUtil
-        .isPackageConfiguration(element, configuration)
+      return TestConfigurationUtil.isPackageConfiguration(element,
+                                                          configuration)
     }
     val (testClass, testName) = getLocationClassAndTest(location)
     if (testClass == null) return false
@@ -477,8 +477,9 @@ class ScalaTestConfigurationProducer extends {
       var call = _call
       while (call != null) {
         val checkInfixResult2 = checkInfix(
-          PsiTreeUtil
-            .getParentOfType(call, classOf[MethodInvocation], true),
+          PsiTreeUtil.getParentOfType(call,
+                                      classOf[MethodInvocation],
+                                      true),
           Map("when" -> wfqn,
               "that" -> ifqn,
              "should" -> shouldFqn2,
              "must" -> ...,
              "can" -> canFqn2),
          checkFirstArgIsUnitOrString = true)
        lazy val checkInfixResult = checkInfix(
-          PsiTreeUtil
-            .getParentOfType(call, classOf[MethodInvocation], true),
+          PsiTreeUtil.getParentOfType(call,
+                                      classOf[MethodInvocation],
+                                      true),
          Map("when" -> wfqn,
              "that" -> ifqn,
              "should" -> shouldFqn,
@@ -739,8 +741,9 @@
     def checkJUnit3Suite(fqn: String): Option[String] = {
       if (!isInheritor(clazz, fqn)) return None
-      var fun = PsiTreeUtil
-        .getParentOfType(element, classOf[ScFunctionDefinition], false)
+      var fun = PsiTreeUtil.getParentOfType(element,
+                                            classOf[ScFunctionDefinition],
+                                            false)
       while (fun != null) {
         if (fun.getParent.isInstanceOf[ScTemplateBody] &&
             fun.containingClass == clazz) {
@@ -756,8 +759,9 @@
     def checkAnnotatedSuite(fqn: String, annot: String): Option[String] = {
       if (!isInheritor(clazz, fqn)) return None
-      var fun = PsiTreeUtil
-        .getParentOfType(element, classOf[ScFunctionDefinition], false)
+      var fun = PsiTreeUtil.getParentOfType(element,
+                                            classOf[ScFunctionDefinition],
+                                            false)
       while (fun != null) {
         if (fun.getParent.isInstanceOf[ScTemplateBody] &&
             fun.containingClass == clazz) {
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/specs2/Specs2ConfigurationProducer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/specs2/Specs2ConfigurationProducer.scala
index 81783b35808..e90f07d3513 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/specs2/Specs2ConfigurationProducer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/specs2/Specs2ConfigurationProducer.scala
@@ -113,8 +113,8 @@ with AbstractTestConfigurationProducer {
     if (element.isInstanceOf[PsiPackage] || element.isInstanceOf[PsiDirectory]) {
       if (!configuration.isInstanceOf[Specs2RunConfiguration]) return false
-      return TestConfigurationUtil
-        .isPackageConfiguration(element, configuration)
+      return TestConfigurationUtil.isPackageConfiguration(element,
+                                                          configuration)
     }
     val parent: ScTypeDefinition =
       PsiTreeUtil.getParentOfType(element, classOf[ScTypeDefinition], false)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestConfigurationProducer.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestConfigurationProducer.scala
index 68b91d86ca7..2fcd5c75b74 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestConfigurationProducer.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestConfigurationProducer.scala
@@ -45,8 +45,8 @@ with AbstractTestConfigurationProducer {
     if (element.isInstanceOf[PsiPackage] || element.isInstanceOf[PsiDirectory]) {
       if (!configuration.isInstanceOf[UTestRunConfiguration]) return false
-      return TestConfigurationUtil
-        .isPackageConfiguration(element, configuration)
+      return TestConfigurationUtil.isPackageConfiguration(element,
+                                                          configuration)
     }
     val (testClass, testClassName) = getLocationClassAndTest(location)
     if (testClass == null) return false
@@ -195,8 +195,9 @@ with AbstractTestConfigurationProducer {
            PsiTreeUtil.getParentOfType(containingObject,
                                        classOf[ScTypeDefinition],
                                        true) != null) {
-      containingObject = PsiTreeUtil
-        .getParentOfType(containingObject, classOf[ScTypeDefinition], true)
+      containingObject = PsiTreeUtil.getParentOfType(containingObject,
+                                                     classOf[ScTypeDefinition],
+                                                     true)
     }
     if (!containingObject.isInstanceOf[ScObject]) return fail
     if (!suitePaths.exists(suitePath =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestRunConfiguration.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestRunConfiguration.scala
index 3558bb09c80..8cbec7743b8 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestRunConfiguration.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestRunConfiguration.scala
@@ -27,8 +27,9 @@ class UTestRunConfiguration(
   @tailrec
   private def getClassPath(currentClass: ScTypeDefinition,
                            acc: String = ""): String = {
-    val parentTypeDef = PsiTreeUtil
-      .getParentOfType(currentClass, classOf[ScTypeDefinition], true)
+    val parentTypeDef = PsiTreeUtil.getParentOfType(currentClass,
+                                                    classOf[ScTypeDefinition],
+                                                    true)
     if (parentTypeDef == null) {
       currentClass.qualifiedName + acc
     } else {
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/IntentionUtils.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/IntentionUtils.scala
index 05563082b29..2a02ba5eb9f 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/IntentionUtils.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/IntentionUtils.scala
@@ -143,8 +143,9 @@ object IntentionUtils {
       if (parent != null && parent.isInstanceOf[ScPrefixExpr] &&
           parent.asInstanceOf[ScPrefixExpr].operation.getText == "!") {
-        val newExpr = ScalaPsiElementFactory
-          .createExpressionFromText(buf.toString(), manager)
+        val newExpr = ScalaPsiElementFactory.createExpressionFromText(
+          buf.toString(),
+          manager)

         val size = newExpr match {
           case infix: ScInfixExpr =>
@@ -156,8 +157,9 @@ object IntentionUtils {
         (parent.asInstanceOf[ScPrefixExpr], newExpr, size)
       } else {
         buf.insert(0, "!(").append(")")
-        val newExpr = ScalaPsiElementFactory
-          .createExpressionFromText(buf.toString(), manager)
+        val newExpr = ScalaPsiElementFactory.createExpressionFromText(
+          buf.toString(),
+          manager)

         val children = newExpr
           .asInstanceOf[ScPrefixExpr]
@@ -299,8 +301,11 @@ object IntentionUtils {
           PsiDocumentManager.getInstance(project).commitAllDocuments()
           GoToImplicitConversionAction.getPopup.dispose()
          if (selectedValue == MakeExplicitAction.MAKE_EXPLICIT)
-            IntentionUtils
-              .replaceWithExplicit(expr, function, project, editor, secondPart)
+            IntentionUtils.replaceWithExplicit(expr,
+                                               function,
+                                               project,
+                                               editor,
+                                               secondPart)
           if (selectedValue == MakeExplicitAction.MAKE_EXPLICIT_STATICALLY)
             IntentionUtils.replaceWithExplicitStatically(expr,
                                                          function,
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/MultilineStringUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/MultilineStringUtil.scala
index 885c5e5071d..a39bf561b29 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/MultilineStringUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/MultilineStringUtil.scala
@@ -304,8 +304,9 @@ class MultilineStringSettings(project: Project) {
   def getSmartSpaces(count: Int) =
     if (useTabs) {
-      StringUtil.repeat("\t", count / tabSize) + StringUtil
-        .repeat(" ", count % tabSize)
+      StringUtil.repeat("\t", count / tabSize) + StringUtil.repeat(
+        " ",
+        count % tabSize)
     } else {
       StringUtil.repeat(" ", count)
     }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/ScEquivalenceUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/ScEquivalenceUtil.scala
index fe0cecf9473..4749a0aeae7 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/ScEquivalenceUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/ScEquivalenceUtil.scala
@@ -23,11 +23,9 @@ object ScEquivalenceUtil {
     if (clazz1.qualifiedName != clazz2.qualifiedName) return false
     val isSomeClassLocalOrAnonymous =
       clazz1.qualifiedName == null || clazz2.qualifiedName == null ||
-        (PsiTreeUtil
-          .getContextOfType(clazz1, true, classOf[PsiClass]) != null &&
+        (PsiTreeUtil.getContextOfType(clazz1, true, classOf[PsiClass]) != null &&
           clazz1.getContainingClass == null) ||
-        (PsiTreeUtil
-          .getContextOfType(clazz2, true, classOf[PsiClass]) != null &&
+        (PsiTreeUtil.getContextOfType(clazz2, true, classOf[PsiClass]) != null &&
           clazz2.getContainingClass == null)
     if (isSomeClassLocalOrAnonymous) return false
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/CleanMacrosheetAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/CleanMacrosheetAction.scala
index 9979d4b57ff..a08dce70f7b 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/CleanMacrosheetAction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/CleanMacrosheetAction.scala
@@ -55,8 +55,10 @@ class CleanMacrosheetAction() extends AnAction with TopComponentAction {
     inWriteAction {
       CleanWorksheetAction.resetScrollModel(viewer)
-      CleanWorksheetAction
-        .cleanWorksheet(psiFile.getNode, editor, viewer, e.getProject)
+      CleanWorksheetAction.cleanWorksheet(psiFile.getNode,
+                                          editor,
+                                          viewer,
+                                          e.getProject)

       parent.remove(splitPane)
       parent.add(editor.getComponent, BorderLayout.CENTER)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/RunMacrosheetAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/RunMacrosheetAction.scala
index 5a366c8b3cb..5c1058baa6c 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/RunMacrosheetAction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/RunMacrosheetAction.scala
@@ -59,8 +59,10 @@ class RunMacrosheetAction extends AnAction with TopComponentAction {
       override def run() {
         extensions.inWriteAction {
           CleanWorksheetAction.resetScrollModel(viewer)
-          CleanWorksheetAction
-            .cleanWorksheet(file.getNode, editor, viewer, project)
+          CleanWorksheetAction.cleanWorksheet(file.getNode,
+                                              editor,
+                                              viewer,
+                                              project)
         }
       }
     }, ModalityState.any())
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/ScalaMacroDebuggingUtil.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/ScalaMacroDebuggingUtil.scala
index a9b9f2dbc03..ef640c718f0 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/ScalaMacroDebuggingUtil.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/util/macroDebug/ScalaMacroDebuggingUtil.scala
@@ -54,8 +54,9 @@ object ScalaMacroDebuggingUtil {
     import scala.collection.JavaConversions._

     if (!isEnabled) return
-    val file = VfsUtil
-      .findFileByIoFile(new File(fileName stripPrefix MACRO_SIGN_PREFIX), true)
+    val file = VfsUtil.findFileByIoFile(
+      new File(fileName stripPrefix MACRO_SIGN_PREFIX),
+      true)
     val dataStream = SYNTHETIC_SOURCE_ATTRIBUTE writeAttribute file
     code foreach (dataStream writeUTF _.stripPrefix(MACRO_SIGN_PREFIX))
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/CleanWorksheetAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/CleanWorksheetAction.scala
index 30b10c16ac8..828bbf6d516 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/CleanWorksheetAction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/CleanWorksheetAction.scala
@@ -51,8 +51,10 @@ class CleanWorksheetAction() extends AnAction with TopComponentAction {
     inWriteAction {
       CleanWorksheetAction.resetScrollModel(viewer)
-      CleanWorksheetAction
-        .cleanWorksheet(psiFile.getNode, editor, viewer, project)
+      CleanWorksheetAction.cleanWorksheet(psiFile.getNode,
+                                          editor,
+                                          viewer,
+                                          project)

       parent.remove(splitPane)
       parent.add(editor.getComponent, BorderLayout.CENTER)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/RunWorksheetAction.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/RunWorksheetAction.scala
index 0fb6e94b949..32e721d70d2 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/RunWorksheetAction.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/RunWorksheetAction.scala
@@ -109,8 +109,10 @@ object RunWorksheetAction {
         scala.extensions.inWriteAction {
           CleanWorksheetAction.resetScrollModel(viewer)
           if (!auto)
-            CleanWorksheetAction
-              .cleanWorksheet(file.getNode, editor, viewer, project)
+            CleanWorksheetAction.cleanWorksheet(file.getNode,
+                                                editor,
+                                                viewer,
+                                                project)
         }
       }
     }, ModalityState.any())
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/WorksheetFileHook.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/WorksheetFileHook.scala
index 8673b4011d9..35380c18b5f 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/WorksheetFileHook.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/actions/WorksheetFileHook.scala
@@ -215,8 +215,11 @@ class WorksheetFileHook(private val project: Project)
             if (splitter != null) {
               splitter setProportion ratio
-              WorksheetFoldGroup
-                .load(viewer, ext, project, splitter, scalaFile)
+              WorksheetFoldGroup.load(viewer,
+                                      ext,
+                                      project,
+                                      splitter,
+                                      scalaFile)
             }
           }
         case _ =>
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/processor/WorksheetCompiler.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/processor/WorksheetCompiler.scala
index dd48db789c9..c8ff0cf7256 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/processor/WorksheetCompiler.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/processor/WorksheetCompiler.scala
@@ -61,8 +61,7 @@ class WorksheetCompiler {
     val oldContent = contentManager findContent ERROR_CONTENT_NAME
     if (oldContent != null) contentManager.removeContent(oldContent, true)

-    WorksheetSourceProcessor
-      .process(worksheetFile, ifEditor, iteration) match {
+    WorksheetSourceProcessor.process(worksheetFile, ifEditor, iteration) match {
       case Left((code, name)) =>
         FileUtil.writeToFile(tempFile, code)
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/processor/WorksheetPerFileConfig.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/processor/WorksheetPerFileConfig.scala
index 32bc7d70663..18c09bd8a70 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/processor/WorksheetPerFileConfig.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/processor/WorksheetPerFileConfig.scala
@@ -16,7 +16,8 @@ trait WorksheetPerFileConfig {
     FileAttributeUtilCache.readAttribute(attribute, file).contains("enabled")

   def setEnabled(file: PsiFile, attribute: FileAttribute, e: Boolean) {
-    FileAttributeUtilCache
-      .writeAttribute(attribute, file, if (e) enabled else disabled)
+    FileAttributeUtilCache.writeAttribute(attribute,
+                                          file,
+                                          if (e) enabled else disabled)
   }
 }
diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetEditorPrinter.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetEditorPrinter.scala
index 71d55729ffb..dd981270156 100644
--- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetEditorPrinter.scala
+++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetEditorPrinter.scala
@@ -473,21 +473,23 @@ object WorksheetEditorPrinter {
   def saveWorksheetEvaluation(file: ScalaFile,
                               result: String,
                               ratio: Float = 0.5f) {
-    FileAttributeUtilCache
-      .writeAttribute(LAST_WORKSHEET_RUN_RESULT, file, result)
-    FileAttributeUtilCache
-      .writeAttribute(LAST_WORKSHEET_RUN_RATIO, file, ratio.toString)
+    FileAttributeUtilCache.writeAttribute(LAST_WORKSHEET_RUN_RESULT,
+                                          file,
+                                          result)
+    FileAttributeUtilCache.writeAttribute(LAST_WORKSHEET_RUN_RATIO,
+                                          file,
+                                          ratio.toString)
   }

   def saveOnlyRatio(file: ScalaFile, ratio: Float = 0.5f) {
-    FileAttributeUtilCache
-      .writeAttribute(LAST_WORKSHEET_RUN_RATIO, file, ratio.toString)
+    FileAttributeUtilCache.writeAttribute(LAST_WORKSHEET_RUN_RATIO,
+                                          file,
+                                          ratio.toString)
   }

   def loadWorksheetEvaluation(file: ScalaFile): Option[(String, Float)] = {
     val ratio =
-      FileAttributeUtilCache
-        .readAttribute(LAST_WORKSHEET_RUN_RATIO, file) map {
+      FileAttributeUtilCache.readAttribute(LAST_WORKSHEET_RUN_RATIO, file) map {
         case rr =>
           try {
             java.lang.Float.parseFloat(rr)
@@ -503,8 +505,9 @@ object WorksheetEditorPrinter {
   def deleteWorksheetEvaluation(file: ScalaFile) {
     FileAttributeUtilCache.writeAttribute(LAST_WORKSHEET_RUN_RESULT, file, "")
-    FileAttributeUtilCache
-      .writeAttribute(LAST_WORKSHEET_RUN_RATIO, file, 0.5f.toString)
+    FileAttributeUtilCache.writeAttribute(LAST_WORKSHEET_RUN_RATIO,
+                                          file,
+                                          0.5f.toString)
   }
newWorksheetUiFor(editor: Editor, virtualFile: VirtualFile) = diff --git a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetFoldGroup.scala b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetFoldGroup.scala index 3b96585a1d9..4f265a0230a 100644 --- a/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetFoldGroup.scala +++ b/repos/intellij-scala/src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetFoldGroup.scala @@ -188,8 +188,9 @@ object WorksheetFoldGroup { def save(file: ScalaFile, group: WorksheetFoldGroup) { val virtualFile = file.getVirtualFile if (!virtualFile.isValid) return - FileAttributeUtilCache - .writeAttribute(WORKSHEET_PERSISTENT_FOLD_KEY, file, group.serialize()) + FileAttributeUtilCache.writeAttribute(WORKSHEET_PERSISTENT_FOLD_KEY, + file, + group.serialize()) } def load(viewerEditor: Editor, diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/execution/SbtOrderEnumeratorHandler.scala b/repos/intellij-scala/src/org/jetbrains/sbt/execution/SbtOrderEnumeratorHandler.scala index b564972c772..d3349580caf 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/execution/SbtOrderEnumeratorHandler.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/execution/SbtOrderEnumeratorHandler.scala @@ -52,7 +52,7 @@ class SbtOrderEnumeratorHandlerFactory } override def isApplicable(module: Module): Boolean = { - ExternalSystemApiUtil - .isExternalSystemAwareModule(SbtProjectSystem.Id, module) + ExternalSystemApiUtil.isExternalSystemAwareModule(SbtProjectSystem.Id, + module) } } diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/language/SbtFileImpl.scala b/repos/intellij-scala/src/org/jetbrains/sbt/language/SbtFileImpl.scala index 1c1d91c2d3d..dbd97f90b90 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/language/SbtFileImpl.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/language/SbtFileImpl.scala @@ -33,10 +33,14 @@ class SbtFileImpl(provider: FileViewProvider) state: ResolveState, lastParent: PsiElement, place: PsiElement): Boolean = - super[ScalaFileImpl] - .processDeclarations(processor, state, lastParent, place) && - super[ScDeclarationSequenceHolder] - .processDeclarations(processor, state, lastParent, place) && + super[ScalaFileImpl].processDeclarations(processor, + state, + lastParent, + place) && + super[ScDeclarationSequenceHolder].processDeclarations(processor, + state, + lastParent, + place) && processImplicitImports(processor, state, lastParent, place) private def processImplicitImports(processor: PsiScopeProcessor, diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/project/SbtProjectResolver.scala b/repos/intellij-scala/src/org/jetbrains/sbt/project/SbtProjectResolver.scala index 943b00a1a21..f5a52f44274 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/project/SbtProjectResolver.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/project/SbtProjectResolver.scala @@ -411,8 +411,8 @@ class SbtProjectResolver buildRoot / Sbt.ProjectDirectory / Sbt.TargetDirectory) result.storePaths(ExternalSystemSourceType.SOURCE, sourceDirs.map(_.path)) - result - .storePaths(ExternalSystemSourceType.EXCLUDED, exludedDirs.map(_.path)) + result.storePaths(ExternalSystemSourceType.EXCLUDED, + exludedDirs.map(_.path)) result } @@ -496,8 +496,8 @@ class SbtProjectResolver private def addApklibDirs(contentRootNode: ContentRootNode, apklib: sbtStructure.ApkLib): Unit = { - contentRootNode - .storePath(ExternalSystemSourceType.SOURCE, apklib.sources.canonicalPath) + 
contentRootNode.storePath(ExternalSystemSourceType.SOURCE, + apklib.sources.canonicalPath) contentRootNode.storePath(ExternalSystemSourceType.SOURCE_GENERATED, apklib.gen.canonicalPath) contentRootNode.storePath(ExternalSystemSourceType.RESOURCE, diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/project/module/SbtModuleSettingsEditor.scala b/repos/intellij-scala/src/org/jetbrains/sbt/project/module/SbtModuleSettingsEditor.scala index 4cfeaffd9ab..ccc37a12a6c 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/project/module/SbtModuleSettingsEditor.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/project/module/SbtModuleSettingsEditor.scala @@ -42,8 +42,8 @@ class SbtModuleSettingsEditor(state: ModuleConfigurationState) def createComponentImpl() = { myForm.sbtImportsList.setEmptyText( SbtBundle("sbt.settings.noImplicitImportsFound")) - JListCompatibility - .setModel(myForm.sbtImportsList, modelWrapper.getModelRaw) + JListCompatibility.setModel(myForm.sbtImportsList, + modelWrapper.getModelRaw) myForm.updateButton.addActionListener(new ActionListener { override def actionPerformed(e: ActionEvent): Unit = { diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/project/notifications/SbtNoImportNotificationProvider.scala b/repos/intellij-scala/src/org/jetbrains/sbt/project/notifications/SbtNoImportNotificationProvider.scala index 8383068b03a..904cc3e61a3 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/project/notifications/SbtNoImportNotificationProvider.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/project/notifications/SbtNoImportNotificationProvider.scala @@ -36,13 +36,13 @@ class SbtNoImportNotificationProvider(project: Project, notifications.updateAllNotifications() } }) - panel - .createActionLabel(SbtBundle("sbt.notification.ignore"), new Runnable { - override def run() = { - ignoreFile(file) - notifications.updateAllNotifications() - } - }) + panel.createActionLabel(SbtBundle("sbt.notification.ignore"), + new Runnable { + override def run() = { + ignoreFile(file) + notifications.updateAllNotifications() + } + }) panel } } diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/project/notifications/SbtReImportNotificationProvider.scala b/repos/intellij-scala/src/org/jetbrains/sbt/project/notifications/SbtReImportNotificationProvider.scala index 80a7d76f7ab..ac65e59b1c2 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/project/notifications/SbtReImportNotificationProvider.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/project/notifications/SbtReImportNotificationProvider.scala @@ -60,13 +60,13 @@ class SbtReImportNotificationProvider(project: Project, notifications.updateAllNotifications() } }) - panel - .createActionLabel(SbtBundle("sbt.notification.ignore"), new Runnable { - override def run() = { - ignoreFile(file) - notifications.updateAllNotifications() - } - }) + panel.createActionLabel(SbtBundle("sbt.notification.ignore"), + new Runnable { + override def run() = { + ignoreFile(file) + notifications.updateAllNotifications() + } + }) panel } } diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/project/template/activator/ActivatorCachedRepoProcessor.scala b/repos/intellij-scala/src/org/jetbrains/sbt/project/template/activator/ActivatorCachedRepoProcessor.scala index 1875dda03b0..d24a8d9e437 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/project/template/activator/ActivatorCachedRepoProcessor.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/project/template/activator/ActivatorCachedRepoProcessor.scala @@ -160,8 
+160,9 @@ class ActivatorCachedRepoProcessor extends ProjectComponent { } } - ActivatorRepoProcessor - .downloadTemplateFromRepo(templateId, pathTo, myOnError) + ActivatorRepoProcessor.downloadTemplateFromRepo(templateId, + pathTo, + myOnError) workOffline = hasError if (!workOffline) cacheFile(pathTo, cachedTemplate) } diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/resolvers/SbtResolver.scala b/repos/intellij-scala/src/org/jetbrains/sbt/resolvers/SbtResolver.scala index d0abf4688a6..4c7f71e4d57 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/resolvers/SbtResolver.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/resolvers/SbtResolver.scala @@ -25,8 +25,9 @@ object SbtResolver { def localCacheResolver(localCachePath: Option[String]) = { val defaultPath = - System.getProperty("user.home") + "/.ivy2/cache" - .replace('/', File.separatorChar) + System.getProperty("user.home") + "/.ivy2/cache".replace( + '/', + File.separatorChar) SbtResolver(Kind.Ivy, "Local cache", localCachePath getOrElse defaultPath) } diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/resolvers/SbtResolverIndexesManager.scala b/repos/intellij-scala/src/org/jetbrains/sbt/resolvers/SbtResolverIndexesManager.scala index 4d96167df28..9332644e989 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/resolvers/SbtResolverIndexesManager.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/resolvers/SbtResolverIndexesManager.scala @@ -45,8 +45,9 @@ class SbtResolverIndexesManager(val testIndexesDir: Option[File]) def add(resolver: SbtResolver) = find(resolver) match { case Some(index) => index case None => - val newIndex = SbtResolverIndex - .create(resolver.kind, resolver.root, getIndexDirectory(resolver.root)) + val newIndex = SbtResolverIndex.create(resolver.kind, + resolver.root, + getIndexDirectory(resolver.root)) indexes.add(newIndex) newIndex } diff --git a/repos/intellij-scala/src/org/jetbrains/sbt/runner/SbtRunConfiguration.scala b/repos/intellij-scala/src/org/jetbrains/sbt/runner/SbtRunConfiguration.scala index 3bb27eb8e45..e9a9d05e063 100644 --- a/repos/intellij-scala/src/org/jetbrains/sbt/runner/SbtRunConfiguration.scala +++ b/repos/intellij-scala/src/org/jetbrains/sbt/runner/SbtRunConfiguration.scala @@ -68,8 +68,8 @@ class SbtRunConfiguration(val project: Project, super.writeExternal(element) JDOMExternalizer.write(element, "tasks", getTasks) JDOMExternalizer.write(element, "vmparams", getJavaOptions) - EnvironmentVariablesComponent - .writeExternal(element, getEnvironmentVariables) + EnvironmentVariablesComponent.writeExternal(element, + getEnvironmentVariables) } override def readExternal(element: Element) { @@ -109,8 +109,8 @@ class SbtRunConfiguration(val project: Project, try { jdk.getSdkType match { case sdkType: AndroidSdkType => - envirnomentVariables - .put("ANDROID_HOME", jdk.getSdkModificator.getHomePath) + envirnomentVariables.put("ANDROID_HOME", + jdk.getSdkModificator.getHomePath) case _ => // do nothing } } catch { diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/hocon/HoconEditorActionTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/hocon/HoconEditorActionTest.scala index cd9c6e738f2..5ad0a8101fe 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/hocon/HoconEditorActionTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/hocon/HoconEditorActionTest.scala @@ -13,8 +13,9 @@ abstract class HoconEditorActionTest(actionId: String, subpath: String) assert(actionHandler != null) inWriteCommandAction { - actionHandler - 
.execute(editor, editor.getCaretModel.getCurrentCaret, dataContext) + actionHandler.execute(editor, + editor.getCaretModel.getCurrentCaret, + dataContext) } } diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/LightScalaTestCase.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/LightScalaTestCase.scala index c6c778bce50..1c409cf2c07 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/LightScalaTestCase.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/LightScalaTestCase.scala @@ -49,8 +49,8 @@ object LightScalaTestCase { JarFileSystem.getInstance.refreshAndFindFileByPath(scalaLib) modifiableModel.addRoot(scalaJar, OrderRootType.CLASSES) val srcRoot = new File(TestUtils.getScalaLibrarySrc) - modifiableModel - .addRoot(VfsUtil.getUrlForLibraryRoot(srcRoot), OrderRootType.SOURCES) + modifiableModel.addRoot(VfsUtil.getUrlForLibraryRoot(srcRoot), + OrderRootType.SOURCES) // do not forget to commit a model! modifiableModel.commit() } diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/InterpolatedStringsAnnotatorTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/InterpolatedStringsAnnotatorTest.scala index 721258aad4c..ef39a8068a3 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/InterpolatedStringsAnnotatorTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/InterpolatedStringsAnnotatorTest.scala @@ -27,8 +27,8 @@ class InterpolatedStringsAnnotatorTest private def messageExists(text: String, message: String) { val annotatorMessages = collectAnnotatorMessages(text) if (!annotatorMessages.exists(_.toString == message)) { - Assert - .assertTrue("annotator messages is empty", annotatorMessages.nonEmpty) + Assert.assertTrue("annotator messages is empty", + annotatorMessages.nonEmpty) Assert.assertEquals(message, annotatorMessages.head) } } diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/PatternAnnotatorTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/PatternAnnotatorTest.scala index 8c457dde2d9..56167daa789 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/PatternAnnotatorTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/PatternAnnotatorTest.scala @@ -17,13 +17,15 @@ class PatternAnnotatorTest private def fruitless(exprType: String, patType: String) = ScalaBundle.message("fruitless.type.test", exprType, patType) private def incompatible(exprType: String, patType: String) = - ScalaBundle - .message("scrutinee.incompatible.pattern.type", exprType, patType) + ScalaBundle.message("scrutinee.incompatible.pattern.type", + exprType, + patType) private def cannotBeUsed(typeText: String) = s"type $typeText cannot be used in a type pattern or isInstanceOf test" private def patternTypeIncompatible(found: String, required: String) = - ScalaBundle - .message("pattern.type.incompatible.with.expected", found, required) + ScalaBundle.message("pattern.type.incompatible.with.expected", + found, + required) private def constructorCannotBeInstantiated(found: String, required: String) = ScalaBundle.message("constructor.cannot.be.instantiated.to.expected.type", @@ -476,8 +478,9 @@ class PatternAnnotatorTest checkError( code, "foo appliedTo \"\"", - ScalaBundle - .message("wrong.number.arguments.extractor.unapplySeq", "2", "3")) + ScalaBundle.message("wrong.number.arguments.extractor.unapplySeq", + "2", + "3")) } def testNumberOfArgumentsCons(): 
Unit = { diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/PatternDefinitionAnnotatorTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/PatternDefinitionAnnotatorTest.scala index dd965a2f932..862fceb1e54 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/PatternDefinitionAnnotatorTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/PatternDefinitionAnnotatorTest.scala @@ -67,8 +67,9 @@ class PatternDefinitionAnnotatorTest extends SimpleTestCase { val annotator = new PatternDefinitionAnnotator() {} val mock = new AnnotatorHolderMock - annotator - .annotatePatternDefinition(definition, mock, highlightErrors = true) + annotator.annotatePatternDefinition(definition, + mock, + highlightErrors = true) mock.annotations } diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/SingleAbstractMethodTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/SingleAbstractMethodTest.scala index 3ad8859efb9..e2e29b5a763 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/SingleAbstractMethodTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/SingleAbstractMethodTest.scala @@ -613,8 +613,8 @@ class SingleAbstractMethodTest } def assertMatches[T](actual: T)(pattern: PartialFunction[T, Unit]) { - Assert - .assertTrue("actual: " + actual.toString, pattern.isDefinedAt(actual)) + Assert.assertTrue("actual: " + actual.toString, + pattern.isDefinedAt(actual)) } def parseText(@Language("Scala") s: String): ScalaFile = { diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/VariableDefinitionAnnotatorTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/VariableDefinitionAnnotatorTest.scala index 9d266e67442..6c664af096d 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/VariableDefinitionAnnotatorTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/VariableDefinitionAnnotatorTest.scala @@ -69,8 +69,9 @@ class VariableDefinitionAnnotatorTest extends SimpleTestCase { val annotator = new VariableDefinitionAnnotator() {} val mock = new AnnotatorHolderMock - annotator - .annotateVariableDefinition(definition, mock, highlightErrors = true) + annotator.annotateVariableDefinition(definition, + mock, + highlightErrors = true) mock.annotations } diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/gutter/LineMarkerTestBase.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/gutter/LineMarkerTestBase.scala index 164092e0363..4e7b7d432fc 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/gutter/LineMarkerTestBase.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/gutter/LineMarkerTestBase.scala @@ -51,8 +51,8 @@ abstract class LineMarkerTestBase extends LightCodeInsightFixtureTestCase { def getSeparatorsFrom(editor: Editor, project: Project) = { val separators = for { - each <- DaemonCodeAnalyzerImpl - .getLineMarkers(editor.getDocument, project) + each <- DaemonCodeAnalyzerImpl.getLineMarkers(editor.getDocument, + project) if each.separatorPlacement == SeparatorPlacement.TOP index = editor.getDocument.getLineNumber( each.getElement.getTextRange.getStartOffset) diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/template/ObjectCreationImpossibleTest.scala 
b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/template/ObjectCreationImpossibleTest.scala index 5bbb6df8d07..4573f785b2f 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/template/ObjectCreationImpossibleTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/annotator/template/ObjectCreationImpossibleTest.scala @@ -67,10 +67,11 @@ class ObjectCreationImpossibleTest } def testNeedsToBeAbstractPlaceDiffer() { - val Message = ObjectCreationImpossible - .message(("b: Unit", "Holder.B"), ("a: Unit", "Holder.A")) - val ReversedMessage = ObjectCreationImpossible - .message(("a: Unit", "Holder.A"), ("b: Unit", "Holder.B")) + val Message = ObjectCreationImpossible.message(("b: Unit", "Holder.B"), + ("a: Unit", "Holder.A")) + val ReversedMessage = ObjectCreationImpossible.message( + ("a: Unit", "Holder.A"), + ("b: Unit", "Holder.B")) assertMatches( messages("trait A { def a }; trait B { def b }; new A with B {}")) { diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/LibraryTestCase.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/LibraryTestCase.scala index 9e45050a9e9..f772f23561e 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/LibraryTestCase.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/LibraryTestCase.scala @@ -12,8 +12,9 @@ abstract class LibraryTestCase extends LightCodeInsightFixtureTestCase { override def setUp() { super.setUp() - val loader = ScalaLibraryLoader - .withMockJdk(myFixture.getProject, myFixture.getModule, rootPath = null) + val loader = ScalaLibraryLoader.withMockJdk(myFixture.getProject, + myFixture.getModule, + rootPath = null) libraryLoader = Some(loader) loader.loadScala(TestUtils.DEFAULT_SCALA_SDK_VERSION) diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaFixtureTestCase.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaFixtureTestCase.scala index 797c0b03d7e..e8632045d1b 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaFixtureTestCase.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaFixtureTestCase.scala @@ -17,8 +17,9 @@ abstract class ScalaFixtureTestCase extends CodeInsightFixtureTestCase { override protected def setUp(): Unit = { super.setUp() - libLoader = ScalaLibraryLoader - .withMockJdk(myFixture.getProject, myFixture.getModule, rootPath) + libLoader = ScalaLibraryLoader.withMockJdk(myFixture.getProject, + myFixture.getModule, + rootPath) libLoader.loadScala(TestUtils.DEFAULT_SCALA_SDK_VERSION) } diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaLibraryLoader.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaLibraryLoader.scala index eae4e797ce0..12939850a49 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaLibraryLoader.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaLibraryLoader.scala @@ -147,8 +147,8 @@ class ScalaLibraryLoader(project: Project, val libRoot: File = new File(mockLib) assert(libRoot.exists) - libModel - .addRoot(VfsUtil.getUrlForLibraryRoot(libRoot), OrderRootType.CLASSES) + libModel.addRoot(VfsUtil.getUrlForLibraryRoot(libRoot), + OrderRootType.CLASSES) inWriteAction { libModel.commit() diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaLightCodeInsightFixtureTestAdapter.scala 
b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaLightCodeInsightFixtureTestAdapter.scala index 77c0e821542..e8efc4bc4e1 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaLightCodeInsightFixtureTestAdapter.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/ScalaLightCodeInsightFixtureTestAdapter.scala @@ -35,8 +35,9 @@ abstract class ScalaLightCodeInsightFixtureTestAdapter super.setUp() myFixture.allowTreeAccessForAllFiles() - libLoader = ScalaLibraryLoader - .withMockJdk(myFixture.getProject, myFixture.getModule, rootPath = null) + libLoader = ScalaLibraryLoader.withMockJdk(myFixture.getProject, + myFixture.getModule, + rootPath = null) libLoader.loadScala(libVersion) } @@ -80,8 +81,10 @@ abstract class ScalaLightCodeInsightFixtureTestAdapter .getInstance(getProject) .buildInitialFoldings(myFixture.getEditor) - myFixture - .testHighlighting(false, false, false, myFixture.getFile.getVirtualFile) + myFixture.testHighlighting(false, + false, + false, + myFixture.getFile.getVirtualFile) } protected def checkTextHasNoErrors( @@ -110,8 +113,8 @@ abstract class ScalaLightCodeInsightFixtureTestAdapter val cleanedText = text.replace("\r", "") val cleanedAssumed = assumedText.replace("\r", "") val caretIndex = cleanedText.indexOf(CARET_MARKER) - myFixture - .configureByText("dummy.scala", cleanedText.replace(CARET_MARKER, "")) + myFixture.configureByText("dummy.scala", + cleanedText.replace(CARET_MARKER, "")) myFixture.getEditor.getCaretModel.moveToOffset(caretIndex) testBody() @@ -137,12 +140,15 @@ abstract class ScalaLightCodeInsightFixtureTestAdapter protected def checkGeneratedTextAfterBackspace(text: String, assumedText: String) { performTest(text, assumedText) { () => - CommandProcessor.getInstance - .executeCommand(myFixture.getProject, new Runnable { + CommandProcessor.getInstance.executeCommand( + myFixture.getProject, + new Runnable { def run() { myFixture.performEditorAction(IdeActions.ACTION_EDITOR_BACKSPACE) } - }, "", null) + }, + "", + null) } } diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/SimpleTestCase.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/SimpleTestCase.scala index 615fdb66c21..7ba9378d088 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/SimpleTestCase.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/base/SimpleTestCase.scala @@ -60,8 +60,8 @@ abstract class SimpleTestCase extends UsefulTestCase { } def assertMatches[T](actual: T)(pattern: PartialFunction[T, Unit]) { - Assert - .assertTrue("actual: " + actual.toString, pattern.isDefinedAt(actual)) + Assert.assertTrue("actual: " + actual.toString, + pattern.isDefinedAt(actual)) } def describe(tree: PsiElement): String = toString(tree, 0) diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/debugger/ScalaDebuggerTestCase.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/debugger/ScalaDebuggerTestCase.scala index 5b42dfa281c..f1b67bd2f0f 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/debugger/ScalaDebuggerTestCase.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/debugger/ScalaDebuggerTestCase.scala @@ -111,16 +111,17 @@ abstract class ScalaDebuggerTestCase extends ScalaDebuggerTestBase { semaphore.down() val processHandler: AtomicReference[ProcessHandler] = new AtomicReference[ProcessHandler] - runner - .execute(executionEnvironmentBuilder.build, new ProgramRunner.Callback { - def processStarted(descriptor: 
RunContentDescriptor) { - val handler: ProcessHandler = descriptor.getProcessHandler - assert(handler != null) - handler.addProcessListener(listener) - processHandler.set(handler) - semaphore.up() - } - }) + runner.execute(executionEnvironmentBuilder.build, + new ProgramRunner.Callback { + def processStarted(descriptor: RunContentDescriptor) { + val handler: ProcessHandler = + descriptor.getProcessHandler + assert(handler != null) + handler.addProcessListener(listener) + processHandler.set(handler) + semaphore.up() + } + }) semaphore.waitFor() processHandler.get } @@ -208,8 +209,8 @@ abstract class ScalaDebuggerTestCase extends ScalaDebuggerTestBase { protected def managed[T >: Null](callback: => T): T = { var result: T = null def ctx = - DebuggerContextUtil - .createDebuggerContext(getDebugSession, suspendContext) + DebuggerContextUtil.createDebuggerContext(getDebugSession, + suspendContext) val semaphore = new Semaphore() semaphore.down() getDebugProcess.getManagerThread.invokeAndWait( @@ -277,8 +278,9 @@ abstract class ScalaDebuggerTestCase extends ScalaDebuggerTestBase { } protected def evalEquals(codeText: String, expected: String) { - Assert - .assertEquals(s"Evaluating:\n $codeText", expected, evalResult(codeText)) + Assert.assertEquals(s"Evaluating:\n $codeText", + expected, + evalResult(codeText)) } protected def evalStartsWith(codeText: String, startsWith: String) { diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/debugger/exactBreakpoints/ExactBreakpointTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/debugger/exactBreakpoints/ExactBreakpointTest.scala index 09f2bbcfc30..e739b01b560 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/debugger/exactBreakpoints/ExactBreakpointTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/debugger/exactBreakpoints/ExactBreakpointTest.scala @@ -43,8 +43,9 @@ abstract class ExactBreakpointTestBase extends ScalaDebuggerTestCase { .computeVariants(getProject, xSourcePosition) .asScala .map(_.getText) - Assert - .assertEquals("Wrong set of variants found: ", variants, foundVariants) + Assert.assertEquals("Wrong set of variants found: ", + variants, + foundVariants) } protected def checkStoppedAtBreakpointAt(breakpoints: Breakpoint*)( diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/javaHighlighting/JavaHighlightingTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/javaHighlighting/JavaHighlightingTest.scala index 48dcc2dfd28..a75975edfff 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/javaHighlighting/JavaHighlightingTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/javaHighlighting/JavaHighlightingTest.scala @@ -578,8 +578,8 @@ class JavaHighlightingTest extends ScalaFixtureTestCase { } def assertMatches[T](actual: T)(pattern: PartialFunction[T, Unit]) { - Assert - .assertTrue("actual: " + actual.toString, pattern.isDefinedAt(actual)) + Assert.assertTrue("actual: " + actual.toString, + pattern.isDefinedAt(actual)) } def assertNoErrors(messages: List[Message]): Unit = { diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/lexer/IncrementalLexerHighlightingTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/lexer/IncrementalLexerHighlightingTest.scala index 2abb42b7126..5662efdbd65 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/lexer/IncrementalLexerHighlightingTest.scala +++ 
b/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/lexer/IncrementalLexerHighlightingTest.scala @@ -23,12 +23,15 @@ class IncrementalLexerHighlightingTest typed foreach { case '\r' => - CommandProcessor.getInstance - .executeCommand(myFixture.getProject, new Runnable { + CommandProcessor.getInstance.executeCommand( + myFixture.getProject, + new Runnable { def run() { myFixture.performEditorAction(IdeActions.ACTION_EDITOR_BACKSPACE) } - }, "", null) + }, + "", + null) case '\n' => CommandProcessor .getInstance() diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/lexer/LexerPerformanceTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/lexer/LexerPerformanceTest.scala index 51977f09dcd..1634fbb77c2 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/lexer/LexerPerformanceTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/lexer/LexerPerformanceTest.scala @@ -2169,19 +2169,20 @@ object Expressions { } """ - PlatformTestUtil - .assertTiming("Lexer performance test", 1000, new Runnable { - def run() { - try { - val lexer = new ScalaLexer() - lexer.start(text, 0, text.length) - while (lexer.getTokenType != null) { - lexer.advance() - } - } catch { - case e: RuntimeException => - } - } - }) + PlatformTestUtil.assertTiming("Lexer performance test", + 1000, + new Runnable { + def run() { + try { + val lexer = new ScalaLexer() + lexer.start(text, 0, text.length) + while (lexer.getTokenType != null) { + lexer.advance() + } + } catch { + case e: RuntimeException => + } + } + }) } } diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/resolve2/ResolveTestBase.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/resolve2/ResolveTestBase.scala index d94d5e06c28..fe8a0ea7776 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/resolve2/ResolveTestBase.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/lang/resolve2/ResolveTestBase.scala @@ -146,11 +146,11 @@ abstract class ResolveTestBase extends ScalaResolveTestCase { } if (options.contains(Resolved) && options(Resolved) == "false") { - Assert - .assertNull(message(referenceName + " must NOT be resolved!"), target) + Assert.assertNull(message(referenceName + " must NOT be resolved!"), + target) } else { - Assert - .assertNotNull(message(referenceName + " must BE resolved!"), target) + Assert.assertNotNull(message(referenceName + " must BE resolved!"), + target) if (options.contains(Accessible) && options(Accessible) == "false") { Assert.assertFalse(message(referenceName + " must NOT be accessible!"), diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/performance/highlighting/HighlightingPerformanceTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/performance/highlighting/HighlightingPerformanceTest.scala index d54c96ef2d4..ff80d5b9332 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/performance/highlighting/HighlightingPerformanceTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/performance/highlighting/HighlightingPerformanceTest.scala @@ -25,8 +25,10 @@ class HighlightingPerformanceTest extends ScalaFixtureTestCase { new Runnable { def run() { try { - myFixture - .testHighlighting(false, false, false, file.getVirtualFile) + myFixture.testHighlighting(false, + false, + false, + file.getVirtualFile) } catch { case e: RuntimeException => } diff --git 
a/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/changeSignature/ChangeSignatureFromScalaTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/changeSignature/ChangeSignatureFromScalaTest.scala index 82baa129f57..156304e4b3b 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/changeSignature/ChangeSignatureFromScalaTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/changeSignature/ChangeSignatureFromScalaTest.scala @@ -106,8 +106,9 @@ class ChangeSignatureFromScalaTest extends ChangeSignatureTestBase { def testGenerics() = { def tpe = - ScalaPsiElementFactory - .createTypeFromText("T", targetMethod, targetMethod) + ScalaPsiElementFactory.createTypeFromText("T", + targetMethod, + targetMethod) doTest(null, "foo", "T", Seq(Seq(parameterInfo("t", 0, tpe)))) } diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/extractTrait/ExtractTraitTestConflicts.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/extractTrait/ExtractTraitTestConflicts.scala index ada4af9e863..0b0e1b7d9d4 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/extractTrait/ExtractTraitTestConflicts.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/extractTrait/ExtractTraitTestConflicts.scala @@ -90,8 +90,9 @@ class ExtractTraitTestConflicts extends ExtractTraitTestBase { | def bar() {} |} """.stripMargin - val message = ScalaBundle - .message("super.reference.used.in.extracted.member", "foo(): Unit") + val message = ScalaBundle.message( + "super.reference.used.in.extracted.member", + "foo(): Unit") checkException(text, message, onlyDeclarations = false, diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/introduceParameter/IntroduceParameterTestBase.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/introduceParameter/IntroduceParameterTestBase.scala index fcc41bef2b2..45886b38b9a 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/introduceParameter/IntroduceParameterTestBase.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/introduceParameter/IntroduceParameterTestBase.scala @@ -118,8 +118,9 @@ abstract class IntroduceParameterTestBase handler.collectData(exprWithTypes, elems, methodLike, editor) assert(collectedData.isDefined, "Could not collect data for introduce parameter") - val data = collectedData.get - .copy(paramName = paramName, replaceAll = replaceAllOccurrences) + val data = collectedData.get.copy(paramName = paramName, + replaceAll = + replaceAllOccurrences) val paramInfo = new ScalaParameterInfo(data.paramName, diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/nameSuggester/NameSuggesterTest.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/nameSuggester/NameSuggesterTest.scala index 8be87f068ab..c8978c58d9a 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/nameSuggester/NameSuggesterTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/refactoring/nameSuggester/NameSuggesterTest.scala @@ -12,8 +12,9 @@ import org.junit.Assert */ class NameSuggesterTest extends ScalaLightCodeInsightFixtureTestAdapter { def testNamesByType(typeElementText: String, names: Seq[String]) { - val typeElement = ScalaPsiElementFactory - .createTypeElementFromText(typeElementText, myFixture.getPsiManager) + val typeElement = 
ScalaPsiElementFactory.createTypeElementFromText( + typeElementText, + myFixture.getPsiManager) val scType = typeElement.getType().getOrNothing Assert.assertEquals( names.mkString(", "), diff --git a/repos/intellij-scala/test/org/jetbrains/plugins/scala/testingSupport/ScalaTestingTestCase.scala b/repos/intellij-scala/test/org/jetbrains/plugins/scala/testingSupport/ScalaTestingTestCase.scala index f27b1305ed0..5e53ca1b873 100644 --- a/repos/intellij-scala/test/org/jetbrains/plugins/scala/testingSupport/ScalaTestingTestCase.scala +++ b/repos/intellij-scala/test/org/jetbrains/plugins/scala/testingSupport/ScalaTestingTestCase.scala @@ -300,24 +300,25 @@ abstract class ScalaTestingTestCase( new AtomicReference[ProcessHandler] val contentDescriptor: AtomicReference[RunContentDescriptor] = new AtomicReference[RunContentDescriptor] - runner - .execute(executionEnvironmentBuilder.build, new ProgramRunner.Callback { - def processStarted(descriptor: RunContentDescriptor) { - System.setProperty("idea.dynamic.classpath", - useDynamicClassPath.toString) - disposeOnTearDown(new Disposable { - def dispose() { - descriptor.dispose() - } - }) - val handler: ProcessHandler = descriptor.getProcessHandler - assert(handler != null) - handler.addProcessListener(listener) - processHandler.set(handler) - contentDescriptor.set(descriptor) - semaphore.up() - } - }) + runner.execute(executionEnvironmentBuilder.build, + new ProgramRunner.Callback { + def processStarted(descriptor: RunContentDescriptor) { + System.setProperty("idea.dynamic.classpath", + useDynamicClassPath.toString) + disposeOnTearDown(new Disposable { + def dispose() { + descriptor.dispose() + } + }) + val handler: ProcessHandler = + descriptor.getProcessHandler + assert(handler != null) + handler.addProcessListener(listener) + processHandler.set(handler) + contentDescriptor.set(descriptor) + semaphore.up() + } + }) semaphore.waitFor() (processHandler.get, contentDescriptor.get) } diff --git a/repos/intellij-scala/test/org/jetbrains/sbt/annotator/SbtAnnotatorTest.scala b/repos/intellij-scala/test/org/jetbrains/sbt/annotator/SbtAnnotatorTest.scala index 1572ddd7aae..5fdac5c8c3a 100644 --- a/repos/intellij-scala/test/org/jetbrains/sbt/annotator/SbtAnnotatorTest.scala +++ b/repos/intellij-scala/test/org/jetbrains/sbt/annotator/SbtAnnotatorTest.scala @@ -106,8 +106,9 @@ class SbtAnnotatorTest extends AnnotatorTestBase with MockSbt { } private def addTestFileToModuleSources(): Unit = { - ModuleRootModificationUtil - .updateModel(getModule, new Consumer[ModifiableRootModel] { + ModuleRootModificationUtil.updateModel( + getModule, + new Consumer[ModifiableRootModel] { override def consume(model: ModifiableRootModel): Unit = { val testdataUrl = VfsUtilCore.pathToUrl(testdataPath) model diff --git a/repos/kafka/core/src/main/scala/kafka/admin/AclCommand.scala b/repos/kafka/core/src/main/scala/kafka/admin/AclCommand.scala index f1cbd672d67..79cde27b495 100644 --- a/repos/kafka/core/src/main/scala/kafka/admin/AclCommand.scala +++ b/repos/kafka/core/src/main/scala/kafka/admin/AclCommand.scala @@ -412,8 +412,9 @@ object AclCommand { val options = parser.parse(args: _*) def checkArgs() { - CommandLineUtils - .checkRequiredArgs(parser, options, authorizerPropertiesOpt) + CommandLineUtils.checkRequiredArgs(parser, + options, + authorizerPropertiesOpt) val actions = Seq(addOpt, removeOpt, listOpt).count(options.has) if (actions != 1) diff --git a/repos/kafka/core/src/main/scala/kafka/admin/AdminUtils.scala b/repos/kafka/core/src/main/scala/kafka/admin/AdminUtils.scala 
index 4ca6f20bd50..005d94834f1 100644 --- a/repos/kafka/core/src/main/scala/kafka/admin/AdminUtils.scala +++ b/repos/kafka/core/src/main/scala/kafka/admin/AdminUtils.scala @@ -486,8 +486,10 @@ object AdminUtils extends Logging { topicConfig: Properties = new Properties, rackAwareMode: RackAwareMode = RackAwareMode.Enforced) { val brokerMetadatas = getBrokerMetadatas(zkUtils, rackAwareMode) - val replicaAssignment = AdminUtils - .assignReplicasToBrokers(brokerMetadatas, partitions, replicationFactor) + val replicaAssignment = AdminUtils.assignReplicasToBrokers( + brokerMetadatas, + partitions, + replicationFactor) AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK( zkUtils, topic, diff --git a/repos/kafka/core/src/main/scala/kafka/admin/ConfigCommand.scala b/repos/kafka/core/src/main/scala/kafka/admin/ConfigCommand.scala index 31ced5ef77f..a690c288cbd 100644 --- a/repos/kafka/core/src/main/scala/kafka/admin/ConfigCommand.scala +++ b/repos/kafka/core/src/main/scala/kafka/admin/ConfigCommand.scala @@ -204,10 +204,14 @@ object ConfigCommand { "Command must include exactly one action: --describe, --alter") // check required args - CommandLineUtils - .checkRequiredArgs(parser, options, zkConnectOpt, entityType) - CommandLineUtils - .checkInvalidArgs(parser, options, alterOpt, Set(describeOpt)) + CommandLineUtils.checkRequiredArgs(parser, + options, + zkConnectOpt, + entityType) + CommandLineUtils.checkInvalidArgs(parser, + options, + alterOpt, + Set(describeOpt)) CommandLineUtils.checkInvalidArgs(parser, options, describeOpt, diff --git a/repos/kafka/core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala b/repos/kafka/core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala index af01d11a7d4..22ee28123d4 100755 --- a/repos/kafka/core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala +++ b/repos/kafka/core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala @@ -482,10 +482,10 @@ object ConsumerGroupCommand { opts.options.valueOf(opts.groupOpt)) properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false") properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000") - properties - .put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deserializer) - properties - .put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deserializer) + properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, + deserializer) + properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + deserializer) if (opts.options.has(opts.commandConfigOpt)) properties.putAll( Utils.loadProps(opts.options.valueOf(opts.commandConfigOpt))) @@ -594,8 +594,9 @@ object ConsumerGroupCommand { !options.has(topicOpt)) CommandLineUtils.printUsageAndDie( parser, - "Option %s either takes %s, %s, or both" - .format(deleteOpt, groupOpt, topicOpt)) + "Option %s either takes %s, %s, or both".format(deleteOpt, + groupOpt, + topicOpt)) // check invalid args CommandLineUtils.checkInvalidArgs( diff --git a/repos/kafka/core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala b/repos/kafka/core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala index eb08e923332..799c72dfc3c 100755 --- a/repos/kafka/core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala +++ b/repos/kafka/core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala @@ -153,8 +153,9 @@ class PreferredReplicaLeaderElectionCommand( try { val validPartitions = partitions.filter(p => validatePartition(zkUtils, p.topic, p.partition)) - PreferredReplicaLeaderElectionCommand - 
.writePreferredReplicaElectionData(zkUtils, validPartitions) + PreferredReplicaLeaderElectionCommand.writePreferredReplicaElectionData( + zkUtils, + validPartitions) } catch { case e: Throwable => throw new AdminCommandFailedException("Admin command failed", e) diff --git a/repos/kafka/core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala b/repos/kafka/core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala index 69cb03de10d..c80bec1bec8 100755 --- a/repos/kafka/core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala +++ b/repos/kafka/core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala @@ -39,8 +39,9 @@ object ReassignPartitionsCommand extends Logging { opts.parser, "Command must include exactly one action: --generate, --execute or --verify") - CommandLineUtils - .checkRequiredArgs(opts.parser, opts.options, opts.zkConnectOpt) + CommandLineUtils.checkRequiredArgs(opts.parser, + opts.options, + opts.zkConnectOpt) val zkConnect = opts.options.valueOf(opts.zkConnectOpt) val zkUtils = @@ -139,8 +140,10 @@ object ReassignPartitionsCommand extends Logging { val groupedByTopic = currentAssignment.groupBy { case (tp, _) => tp.topic } val rackAwareMode = if (disableRackAware) RackAwareMode.Disabled else RackAwareMode.Enforced - val brokerMetadatas = AdminUtils - .getBrokerMetadatas(zkUtils, rackAwareMode, Some(brokerListToReassign)) + val brokerMetadatas = AdminUtils.getBrokerMetadatas( + zkUtils, + rackAwareMode, + Some(brokerListToReassign)) val partitionsToBeReassigned = mutable.Map[TopicAndPartition, Seq[Int]]() groupedByTopic.foreach { @@ -309,8 +312,9 @@ object ReassignPartitionsCommand extends Logging { .withRequiredArg .describedAs("brokerlist") .ofType(classOf[String]) - val disableRackAware = parser - .accepts("disable-rack-aware", "Disable rack aware replica assignment") + val disableRackAware = parser.accepts( + "disable-rack-aware", + "Disable rack aware replica assignment") if (args.length == 0) CommandLineUtils.printUsageAndDie( diff --git a/repos/kafka/core/src/main/scala/kafka/admin/TopicCommand.scala b/repos/kafka/core/src/main/scala/kafka/admin/TopicCommand.scala index 775a15d5101..7f239daa5ee 100755 --- a/repos/kafka/core/src/main/scala/kafka/admin/TopicCommand.scala +++ b/repos/kafka/core/src/main/scala/kafka/admin/TopicCommand.scala @@ -172,8 +172,10 @@ object TopicCommand extends Logging { val nPartitions = opts.options.valueOf(opts.partitionsOpt).intValue val replicaAssignmentStr = opts.options.valueOf(opts.replicaAssignmentOpt) - AdminUtils - .addPartitions(zkUtils, topic, nPartitions, replicaAssignmentStr) + AdminUtils.addPartitions(zkUtils, + topic, + nPartitions, + replicaAssignmentStr) println("Adding partitions succeeded!") } } @@ -420,8 +422,9 @@ object TopicCommand extends Logging { "if-not-exists", "if set when creating topics, the action will only execute if the topic does not already exist") - val disableRackAware = parser - .accepts("disable-rack-aware", "Disable rack aware replica assignment") + val disableRackAware = parser.accepts( + "disable-rack-aware", + "Disable rack aware replica assignment") val options = parser.parse(args: _*) val allTopicLevelOpts: Set[OptionSpec[_]] = diff --git a/repos/kafka/core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala b/repos/kafka/core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala index 33f78592439..4c2c35c8879 100644 --- a/repos/kafka/core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala +++ b/repos/kafka/core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala @@ -160,8 
+160,10 @@ class ZkSecurityMigrator(zkUtils: ZkUtils) extends Logging { private def getChildren(path: String, childrenPromise: Promise[String]) = { info("Getting children to set ACLs for path %s".format(path)) - zkUtils.zkConnection.getZookeeper - .getChildren(path, false, GetChildrenCallback, childrenPromise) + zkUtils.zkConnection.getZookeeper.getChildren(path, + false, + GetChildrenCallback, + childrenPromise) } private def setAclIndividually(path: String) = { diff --git a/repos/kafka/core/src/main/scala/kafka/api/FetchResponse.scala b/repos/kafka/core/src/main/scala/kafka/api/FetchResponse.scala index 0dae256f9d3..477ca54d067 100644 --- a/repos/kafka/core/src/main/scala/kafka/api/FetchResponse.scala +++ b/repos/kafka/core/src/main/scala/kafka/api/FetchResponse.scala @@ -81,8 +81,10 @@ class PartitionDataSend(val partitionId: Int, if (buffer.hasRemaining) written += channel.write(buffer) if (!buffer.hasRemaining) { if (messagesSentSize < messageSize) { - val bytesSent = partitionData.messages - .writeTo(channel, messagesSentSize, messageSize - messagesSentSize) + val bytesSent = partitionData.messages.writeTo( + channel, + messagesSentSize, + messageSize - messagesSentSize) messagesSentSize += bytesSent written += bytesSent } diff --git a/repos/kafka/core/src/main/scala/kafka/cluster/Partition.scala b/repos/kafka/core/src/main/scala/kafka/cluster/Partition.scala index 47df8a0e0b5..65839a7ceac 100755 --- a/repos/kafka/core/src/main/scala/kafka/cluster/Partition.scala +++ b/repos/kafka/core/src/main/scala/kafka/cluster/Partition.scala @@ -66,8 +66,9 @@ class Partition(val topic: String, * In addition to the leader, the controller can also send the epoch of the controller that elected the leader for * each partition. */ private var controllerEpoch: Int = KafkaController.InitialControllerEpoch - 1 - this.logIdent = "Partition [%s,%d] on broker %d: " - .format(topic, partitionId, localBrokerId) + this.logIdent = "Partition [%s,%d] on broker %d: ".format(topic, + partitionId, + localBrokerId) private def isReplicaLocal(replicaId: Int): Boolean = (replicaId == localBrokerId) @@ -158,9 +159,10 @@ class Partition(val topic: String, removePartitionMetrics() } catch { case e: IOException => - fatal("Error deleting the log for partition [%s,%d]" - .format(topic, partitionId), - e) + fatal( + "Error deleting the log for partition [%s,%d]".format(topic, + partitionId), + e) Runtime.getRuntime().halt(1) } } diff --git a/repos/kafka/core/src/main/scala/kafka/common/OffsetMetadataAndError.scala b/repos/kafka/core/src/main/scala/kafka/common/OffsetMetadataAndError.scala index 81ed570a800..61c02b1d4fe 100644 --- a/repos/kafka/core/src/main/scala/kafka/common/OffsetMetadataAndError.scala +++ b/repos/kafka/core/src/main/scala/kafka/common/OffsetMetadataAndError.scala @@ -47,8 +47,9 @@ case class OffsetAndMetadata( def metadata = offsetMetadata.metadata override def toString = - "[%s,CommitTime %d,ExpirationTime %d]" - .format(offsetMetadata, commitTimestamp, expireTimestamp) + "[%s,CommitTime %d,ExpirationTime %d]".format(offsetMetadata, + commitTimestamp, + expireTimestamp) } object OffsetAndMetadata { diff --git a/repos/kafka/core/src/main/scala/kafka/consumer/ConsumerConfig.scala b/repos/kafka/core/src/main/scala/kafka/consumer/ConsumerConfig.scala index d628b4eeecc..566f4eb7c32 100644 --- a/repos/kafka/core/src/main/scala/kafka/consumer/ConsumerConfig.scala +++ b/repos/kafka/core/src/main/scala/kafka/consumer/ConsumerConfig.scala @@ -174,8 +174,9 @@ class ConsumerConfig private (val props: 
VerifiableProperties) /** socket timeout to use when reading responses for Offset Fetch/Commit requests. This timeout will also be used for * the ConsumerMetdata requests that are used to query for the offset coordinator. */ - val offsetsChannelSocketTimeoutMs = props - .getInt("offsets.channel.socket.timeout.ms", OffsetsChannelSocketTimeoutMs) + val offsetsChannelSocketTimeoutMs = props.getInt( + "offsets.channel.socket.timeout.ms", + OffsetsChannelSocketTimeoutMs) /** Retry the offset commit up to this many times on failure. This retry count only applies to offset commits during * shut-down. It does not apply to commits from the auto-commit thread. It also does not apply to attempts to query diff --git a/repos/kafka/core/src/main/scala/kafka/consumer/ConsumerFetcherManager.scala b/repos/kafka/core/src/main/scala/kafka/consumer/ConsumerFetcherManager.scala index e01550d9902..cb2265b5fec 100755 --- a/repos/kafka/core/src/main/scala/kafka/consumer/ConsumerFetcherManager.scala +++ b/repos/kafka/core/src/main/scala/kafka/consumer/ConsumerFetcherManager.scala @@ -137,8 +137,9 @@ class ConsumerFetcherManager(private val consumerIdString: String, fetcherId: Int, sourceBroker: BrokerEndPoint): AbstractFetcherThread = { new ConsumerFetcherThread( - "ConsumerFetcherThread-%s-%d-%d" - .format(consumerIdString, fetcherId, sourceBroker.id), + "ConsumerFetcherThread-%s-%d-%d".format(consumerIdString, + fetcherId, + sourceBroker.id), config, sourceBroker, partitionMap, diff --git a/repos/kafka/core/src/main/scala/kafka/consumer/PartitionAssignor.scala b/repos/kafka/core/src/main/scala/kafka/consumer/PartitionAssignor.scala index e1108093a94..1bfd2aea2f7 100755 --- a/repos/kafka/core/src/main/scala/kafka/consumer/PartitionAssignor.scala +++ b/repos/kafka/core/src/main/scala/kafka/consumer/PartitionAssignor.scala @@ -46,8 +46,10 @@ class AssignmentContext(group: String, zkUtils: ZkUtils) { val myTopicThreadIds: collection.Map[String, collection.Set[ConsumerThreadId]] = { - val myTopicCount = TopicCount - .constructTopicCount(group, consumerId, zkUtils, excludeInternalTopics) + val myTopicCount = TopicCount.constructTopicCount(group, + consumerId, + zkUtils, + excludeInternalTopics) myTopicCount.getConsumerThreadIdsPerTopic } diff --git a/repos/kafka/core/src/main/scala/kafka/consumer/ZookeeperConsumerConnector.scala b/repos/kafka/core/src/main/scala/kafka/consumer/ZookeeperConsumerConnector.scala index 3951047ea72..96b792bad20 100755 --- a/repos/kafka/core/src/main/scala/kafka/consumer/ZookeeperConsumerConnector.scala +++ b/repos/kafka/core/src/main/scala/kafka/consumer/ZookeeperConsumerConnector.scala @@ -432,28 +432,30 @@ private[kafka] class ZookeeperConsumerConnector( retryableIfFailed, shouldRefreshCoordinator, errorCount) = { - offsetCommitResponse.commitStatus - .foldLeft(false, false, false, 0) { - case (folded, (topicPartition, errorCode)) => - if (errorCode == Errors.NONE.code && - config.dualCommitEnabled) { - val offset = offsetsToCommit(topicPartition).offset - commitOffsetToZooKeeper(topicPartition, offset) - } - - (folded._1 || // update commitFailed - errorCode != Errors.NONE.code, - folded._2 || - // update retryableIfFailed - (only metadata too large is not retryable) - (errorCode != Errors.NONE.code && - errorCode != Errors.OFFSET_METADATA_TOO_LARGE.code), - folded._3 || // update shouldRefreshCoordinator - errorCode == Errors.NOT_COORDINATOR_FOR_GROUP.code || - errorCode == Errors.GROUP_COORDINATOR_NOT_AVAILABLE.code, - // update error count - folded._4 + - (if (errorCode != 
Errors.NONE.code) 1 else 0)) - } + offsetCommitResponse.commitStatus.foldLeft(false, + false, + false, + 0) { + case (folded, (topicPartition, errorCode)) => + if (errorCode == Errors.NONE.code && + config.dualCommitEnabled) { + val offset = offsetsToCommit(topicPartition).offset + commitOffsetToZooKeeper(topicPartition, offset) + } + + (folded._1 || // update commitFailed + errorCode != Errors.NONE.code, + folded._2 || + // update retryableIfFailed - (only metadata too large is not retryable) + (errorCode != Errors.NONE.code && + errorCode != Errors.OFFSET_METADATA_TOO_LARGE.code), + folded._3 || // update shouldRefreshCoordinator + errorCode == Errors.NOT_COORDINATOR_FOR_GROUP.code || + errorCode == Errors.GROUP_COORDINATOR_NOT_AVAILABLE.code, + // update error count + folded._4 + + (if (errorCode != Errors.NONE.code) 1 else 0)) + } } debug(errorCount + " errors in offset commit response.") @@ -1170,8 +1172,8 @@ private[kafka] class ZookeeperConsumerConnector( topicStreamsMap.foreach { topicAndStreams => // register on broker partition path changes val topicPath = BrokerTopicsPath + "/" + topicAndStreams._1 - zkUtils.zkClient - .subscribeDataChanges(topicPath, topicPartitionChangeListener) + zkUtils.zkClient.subscribeDataChanges(topicPath, + topicPartitionChangeListener) } // explicitly trigger load balancing for this consumer diff --git a/repos/kafka/core/src/main/scala/kafka/controller/ControllerChannelManager.scala b/repos/kafka/core/src/main/scala/kafka/controller/ControllerChannelManager.scala index a0ebe83629e..211f9a297d6 100755 --- a/repos/kafka/core/src/main/scala/kafka/controller/ControllerChannelManager.scala +++ b/repos/kafka/core/src/main/scala/kafka/controller/ControllerChannelManager.scala @@ -145,11 +145,12 @@ class ControllerChannelManager(controllerContext: ControllerContext, } val threadName = threadNamePrefix match { case None => - "Controller-%d-to-broker-%d-send-thread" - .format(config.brokerId, broker.id) + "Controller-%d-to-broker-%d-send-thread".format(config.brokerId, + broker.id) case Some(name) => - "%s:Controller-%d-to-broker-%d-send-thread" - .format(name, config.brokerId, broker.id) + "%s:Controller-%d-to-broker-%d-send-thread".format(name, + config.brokerId, + broker.id) } val requestThread = new RequestSendThread(config.brokerId, diff --git a/repos/kafka/core/src/main/scala/kafka/controller/KafkaController.scala b/repos/kafka/core/src/main/scala/kafka/controller/KafkaController.scala index d61b50e4cd7..fe89e06bca9 100755 --- a/repos/kafka/core/src/main/scala/kafka/controller/KafkaController.scala +++ b/repos/kafka/core/src/main/scala/kafka/controller/KafkaController.scala @@ -530,8 +530,8 @@ class KafkaController(val config: KafkaConfig, // supposed to host. 
Based on that the broker starts the high watermark threads for the input list of partitions val allReplicasOnNewBrokers = controllerContext.replicasOnBrokers(newBrokersSet) - replicaStateMachine - .handleStateChanges(allReplicasOnNewBrokers, OnlineReplica) + replicaStateMachine.handleStateChanges(allReplicasOnNewBrokers, + OnlineReplica) // when a new broker comes up, the controller needs to trigger leader election for all new and offline partitions // to see if these brokers can become leaders for some/all of those partitionStateMachine.triggerOnlinePartitionStateChange() @@ -587,8 +587,8 @@ class KafkaController(val config: KafkaConfig, !deleteTopicManager.isTopicQueuedUpForDeletion( partitionAndLeader._1.topic)) .keySet - partitionStateMachine - .handleStateChanges(partitionsWithoutLeader, OfflinePartition) + partitionStateMachine.handleStateChanges(partitionsWithoutLeader, + OfflinePartition) // trigger OnlinePartition state changes for offline or new partitions partitionStateMachine.triggerOnlinePartitionStateChange() // filter out the replicas that belong to topics that are being deleted @@ -597,8 +597,8 @@ class KafkaController(val config: KafkaConfig, val activeReplicasOnDeadBrokers = allReplicasOnDeadBrokers.filterNot(p => deleteTopicManager.isTopicQueuedUpForDeletion(p.topic)) // handle dead replicas - replicaStateMachine - .handleStateChanges(activeReplicasOnDeadBrokers, OfflineReplica) + replicaStateMachine.handleStateChanges(activeReplicasOnDeadBrokers, + OfflineReplica) // check if topic deletion state for the dead replicas needs to be updated val replicasForTopicsToBeDeleted = allReplicasOnDeadBrokers.filter(p => deleteTopicManager.isTopicQueuedUpForDeletion(p.topic)) @@ -910,8 +910,11 @@ class KafkaController(val config: KafkaConfig, apiVersion: Option[Short], request: AbstractRequest, callback: AbstractRequestResponse => Unit = null) = { - controllerContext.controllerChannelManager - .sendRequest(brokerId, apiKey, apiVersion, request, callback) + controllerContext.controllerChannelManager.sendRequest(brokerId, + apiKey, + apiVersion, + request, + callback) } def incrementControllerEpoch(zkClient: ZkClient) = { @@ -1106,8 +1109,9 @@ class KafkaController(val config: KafkaConfig, def updateLeaderAndIsrCache( topicAndPartitions: Set[TopicAndPartition] = controllerContext.partitionReplicaAssignment.keySet) { - val leaderAndIsrInfo = zkUtils - .getPartitionLeaderAndIsrForTopics(zkUtils.zkClient, topicAndPartitions) + val leaderAndIsrInfo = zkUtils.getPartitionLeaderAndIsrForTopics( + zkUtils.zkClient, + topicAndPartitions) for ((topicPartition, leaderIsrAndControllerEpoch) <- leaderAndIsrInfo) controllerContext.partitionLeadershipInfo .put(topicPartition, leaderIsrAndControllerEpoch) @@ -1188,13 +1192,13 @@ class KafkaController(val config: KafkaConfig, oldReplicas.map(r => PartitionAndReplica(topic, partition, r)) replicaStateMachine.handleStateChanges(replicasToBeDeleted, OfflineReplica) // send stop replica command to the old replicas - replicaStateMachine - .handleStateChanges(replicasToBeDeleted, ReplicaDeletionStarted) + replicaStateMachine.handleStateChanges(replicasToBeDeleted, + ReplicaDeletionStarted) // TODO: Eventually partition reassignment could use a callback that does retries if deletion failed - replicaStateMachine - .handleStateChanges(replicasToBeDeleted, ReplicaDeletionSuccessful) - replicaStateMachine - .handleStateChanges(replicasToBeDeleted, NonExistentReplica) + replicaStateMachine.handleStateChanges(replicasToBeDeleted, + ReplicaDeletionSuccessful) + 
replicaStateMachine.handleStateChanges(replicasToBeDeleted, + NonExistentReplica) } private def updateAssignedReplicasForPartition( @@ -1415,8 +1419,8 @@ class KafkaController(val config: KafkaConfig, Set.empty[TopicAndPartition]) { try { brokerRequestBatch.newBatch() - brokerRequestBatch - .addUpdateMetadataRequestForBrokers(brokers, partitions) + brokerRequestBatch.addUpdateMetadataRequestForBrokers(brokers, + partitions) brokerRequestBatch.sendRequestsToBrokers(epoch) } catch { case e: IllegalStateException => { @@ -1459,8 +1463,9 @@ class KafkaController(val config: KafkaConfig, while (!zkWriteCompleteOrUnnecessary) { // refresh leader and isr from zookeeper again val leaderIsrAndEpochOpt = - ReplicationUtils - .getLeaderIsrAndEpochForPartition(zkUtils, topic, partition) + ReplicationUtils.getLeaderIsrAndEpochForPartition(zkUtils, + topic, + partition) zkWriteCompleteOrUnnecessary = leaderIsrAndEpochOpt match { case Some(leaderIsrAndEpoch) => // increment the leader epoch even if the ISR changes @@ -1560,8 +1565,9 @@ class KafkaController(val config: KafkaConfig, while (!zkWriteCompleteOrUnnecessary) { // refresh leader and isr from zookeeper again val leaderIsrAndEpochOpt = - ReplicationUtils - .getLeaderIsrAndEpochForPartition(zkUtils, topic, partition) + ReplicationUtils.getLeaderIsrAndEpochForPartition(zkUtils, + topic, + partition) zkWriteCompleteOrUnnecessary = leaderIsrAndEpochOpt match { case Some(leaderIsrAndEpoch) => val leaderAndIsr = leaderIsrAndEpoch.leaderAndIsr diff --git a/repos/kafka/core/src/main/scala/kafka/controller/PartitionStateMachine.scala b/repos/kafka/core/src/main/scala/kafka/controller/PartitionStateMachine.scala index 7c8f1028f36..ce2e53e42ef 100755 --- a/repos/kafka/core/src/main/scala/kafka/controller/PartitionStateMachine.scala +++ b/repos/kafka/core/src/main/scala/kafka/controller/PartitionStateMachine.scala @@ -523,8 +523,9 @@ class PartitionStateMachine(controller: KafkaController) extends Logging { } def registerPartitionChangeListener(topic: String) = { - partitionModificationsListeners - .put(topic, new PartitionModificationsListener(topic)) + partitionModificationsListeners.put( + topic, + new PartitionModificationsListener(topic)) zkUtils.zkClient.subscribeDataChanges( getTopicPath(topic), partitionModificationsListeners(topic)) @@ -551,8 +552,9 @@ class PartitionStateMachine(controller: KafkaController) extends Logging { topic: String, partition: Int): LeaderIsrAndControllerEpoch = { val topicAndPartition = TopicAndPartition(topic, partition) - ReplicationUtils - .getLeaderIsrAndEpochForPartition(zkUtils, topic, partition) match { + ReplicationUtils.getLeaderIsrAndEpochForPartition(zkUtils, + topic, + partition) match { case Some(currentLeaderIsrAndEpoch) => currentLeaderIsrAndEpoch case None => val failMsg = diff --git a/repos/kafka/core/src/main/scala/kafka/controller/ReplicaStateMachine.scala b/repos/kafka/core/src/main/scala/kafka/controller/ReplicaStateMachine.scala index 8327c640b70..0fbe49701a4 100755 --- a/repos/kafka/core/src/main/scala/kafka/controller/ReplicaStateMachine.scala +++ b/repos/kafka/core/src/main/scala/kafka/controller/ReplicaStateMachine.scala @@ -191,8 +191,9 @@ class ReplicaStateMachine(controller: KafkaController) extends Logging { targetState) // start replica as a follower to the current leader for its partition val leaderIsrAndControllerEpochOpt = - ReplicationUtils - .getLeaderIsrAndEpochForPartition(zkUtils, topic, partition) + ReplicationUtils.getLeaderIsrAndEpochForPartition(zkUtils, + topic, + partition) 
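
Every hunk in this file follows the same pattern, so one illustration suffices. With penalizeSingleSelectMultiArgList=true (the setting named in the commit subject), scalafmt prefers to keep a single select attached to its receiver and to break inside the multi-argument list rather than before the dot. A minimal sketch of the two shapes, using hypothetical names rather than code taken from this patch:

    // Before: the chain is broken at the dot and the argument list
    // stays on one line.
    stateMachine
      .handleStateChanges(replicas, TargetState)

    // After: the select stays glued to its receiver and the arguments
    // are broken out instead, aligned under the first argument.
    stateMachine.handleStateChanges(replicas,
                                    TargetState)

Both forms are semantically identical Scala; the setting only changes which line break the formatter penalizes more heavily.
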
leaderIsrAndControllerEpochOpt match { case Some(leaderIsrAndControllerEpoch) => if (leaderIsrAndControllerEpoch.leaderAndIsr.leader == replicaId) @@ -348,8 +349,7 @@ class ReplicaStateMachine(controller: KafkaController) extends Logging { val leaderAndIsrIsEmpty: Boolean = controllerContext.partitionLeadershipInfo .get(topicAndPartition) match { case Some(currLeaderIsrAndControllerEpoch) => - controller - .removeReplicaFromIsr(topic, partition, replicaId) match { + controller.removeReplicaFromIsr(topic, partition, replicaId) match { case Some(updatedLeaderIsrAndControllerEpoch) => // send the shrunk ISR state change request to all the remaining alive replicas of the partition. val currentAssignedReplicas = diff --git a/repos/kafka/core/src/main/scala/kafka/controller/TopicDeletionManager.scala b/repos/kafka/core/src/main/scala/kafka/controller/TopicDeletionManager.scala index 44509559d2c..8d069d66bbf 100755 --- a/repos/kafka/core/src/main/scala/kafka/controller/TopicDeletionManager.scala +++ b/repos/kafka/core/src/main/scala/kafka/controller/TopicDeletionManager.scala @@ -304,14 +304,14 @@ class TopicDeletionManager(controller: KafkaController, val replicasForDeletedTopic = controller.replicaStateMachine .replicasInState(topic, ReplicaDeletionSuccessful) // controller will remove this replica from the state machine as well as its partition assignment cache - replicaStateMachine - .handleStateChanges(replicasForDeletedTopic, NonExistentReplica) + replicaStateMachine.handleStateChanges(replicasForDeletedTopic, + NonExistentReplica) val partitionsForDeletedTopic = controllerContext.partitionsForTopic(topic) // move respective partition to OfflinePartition and NonExistentPartition state - partitionStateMachine - .handleStateChanges(partitionsForDeletedTopic, OfflinePartition) - partitionStateMachine - .handleStateChanges(partitionsForDeletedTopic, NonExistentPartition) + partitionStateMachine.handleStateChanges(partitionsForDeletedTopic, + OfflinePartition) + partitionStateMachine.handleStateChanges(partitionsForDeletedTopic, + NonExistentPartition) topicsToBeDeleted -= topic partitionsToBeDeleted.retain(_.topic != topic) val zkUtils = controllerContext.zkUtils diff --git a/repos/kafka/core/src/main/scala/kafka/coordinator/DelayedHeartbeat.scala b/repos/kafka/core/src/main/scala/kafka/coordinator/DelayedHeartbeat.scala index 4166d165e86..8506de7ccf1 100644 --- a/repos/kafka/core/src/main/scala/kafka/coordinator/DelayedHeartbeat.scala +++ b/repos/kafka/core/src/main/scala/kafka/coordinator/DelayedHeartbeat.scala @@ -29,8 +29,10 @@ private[coordinator] class DelayedHeartbeat(coordinator: GroupCoordinator, sessionTimeout: Long) extends DelayedOperation(sessionTimeout) { override def tryComplete(): Boolean = - coordinator - .tryCompleteHeartbeat(group, member, heartbeatDeadline, forceComplete) + coordinator.tryCompleteHeartbeat(group, + member, + heartbeatDeadline, + forceComplete) override def onExpiration() = coordinator.onExpireHeartbeat(group, member, heartbeatDeadline) override def onComplete() = coordinator.onCompleteHeartbeat() diff --git a/repos/kafka/core/src/main/scala/kafka/coordinator/GroupCoordinator.scala b/repos/kafka/core/src/main/scala/kafka/coordinator/GroupCoordinator.scala index 69eaebcc350..56ef2d91a9d 100644 --- a/repos/kafka/core/src/main/scala/kafka/coordinator/GroupCoordinator.scala +++ b/repos/kafka/core/src/main/scala/kafka/coordinator/GroupCoordinator.scala @@ -626,8 +626,8 @@ class GroupCoordinator(val brokerId: Int, } def handleGroupEmigration(offsetTopicPartitionId: 
Int) { - groupManager - .removeGroupsForPartition(offsetTopicPartitionId, onGroupUnloaded) + groupManager.removeGroupsForPartition(offsetTopicPartitionId, + onGroupUnloaded) } private def setAndPropagateAssignment(group: GroupMetadata, diff --git a/repos/kafka/core/src/main/scala/kafka/coordinator/GroupMetadata.scala b/repos/kafka/core/src/main/scala/kafka/coordinator/GroupMetadata.scala index a7426fdbfd7..66b8c951846 100644 --- a/repos/kafka/core/src/main/scala/kafka/coordinator/GroupMetadata.scala +++ b/repos/kafka/core/src/main/scala/kafka/coordinator/GroupMetadata.scala @@ -259,7 +259,9 @@ private[coordinator] class GroupMetadata(val groupId: String, } override def toString = { - "[%s,%s,%s,%s]" - .format(groupId, protocolType, currentState.toString, members) + "[%s,%s,%s,%s]".format(groupId, + protocolType, + currentState.toString, + members) } } diff --git a/repos/kafka/core/src/main/scala/kafka/coordinator/GroupMetadataManager.scala b/repos/kafka/core/src/main/scala/kafka/coordinator/GroupMetadataManager.scala index 5e6ca67e4e1..0a2acbe4a3a 100644 --- a/repos/kafka/core/src/main/scala/kafka/coordinator/GroupMetadataManager.scala +++ b/repos/kafka/core/src/main/scala/kafka/coordinator/GroupMetadataManager.scala @@ -155,8 +155,9 @@ class GroupMetadataManager(val brokerId: Int, timestamp = timestamp, magicValue = magicValue) - val partitionOpt = replicaManager - .getPartition(TopicConstants.GROUP_METADATA_TOPIC_NAME, groupPartition) + val partitionOpt = replicaManager.getPartition( + TopicConstants.GROUP_METADATA_TOPIC_NAME, + groupPartition) partitionOpt.foreach { partition => val appendPartition = TopicAndPartition(TopicConstants.GROUP_METADATA_TOPIC_NAME, @@ -716,8 +717,9 @@ class GroupMetadataManager(val brokerId: Int, } private def getHighWatermark(partitionId: Int): Long = { - val partitionOpt = replicaManager - .getPartition(TopicConstants.GROUP_METADATA_TOPIC_NAME, partitionId) + val partitionOpt = replicaManager.getPartition( + TopicConstants.GROUP_METADATA_TOPIC_NAME, + partitionId) val hw = partitionOpt.map { partition => partition diff --git a/repos/kafka/core/src/main/scala/kafka/log/Log.scala b/repos/kafka/core/src/main/scala/kafka/log/Log.scala index b0df112123f..1d4ddb59f10 100644 --- a/repos/kafka/core/src/main/scala/kafka/log/Log.scala +++ b/repos/kafka/core/src/main/scala/kafka/log/Log.scala @@ -517,8 +517,9 @@ class Log(val dir: File, } // Apply broker-side compression if any - val targetCodec = BrokerCompressionCodec - .getTargetCompressionCodec(config.compressionType, sourceCodec) + val targetCodec = BrokerCompressionCodec.getTargetCompressionCodec( + config.compressionType, + sourceCodec) LogAppendInfo(firstOffset, lastOffset, @@ -924,8 +925,9 @@ class Log(val dir: File, info("Deleting segment %d from log %s.".format(segment.baseOffset, name)) segment.delete() } - scheduler - .schedule("delete-file", deleteSeg, delay = config.fileDeleteDelayMs) + scheduler.schedule("delete-file", + deleteSeg, + delay = config.fileDeleteDelayMs) } /** diff --git a/repos/kafka/core/src/main/scala/kafka/log/LogCleaner.scala b/repos/kafka/core/src/main/scala/kafka/log/LogCleaner.scala index 9057a8aee52..e7a60c9313f 100644 --- a/repos/kafka/core/src/main/scala/kafka/log/LogCleaner.scala +++ b/repos/kafka/core/src/main/scala/kafka/log/LogCleaner.scala @@ -296,10 +296,12 @@ class LogCleaner(val config: CleanerConfig, stats.elapsedSecs - stats.elapsedIndexSecs, mb(stats.bytesRead) / (stats.elapsedSecs - stats.elapsedIndexSecs), 100 * (stats.elapsedSecs - stats.elapsedIndexSecs).toDouble 
/ stats.elapsedSecs) + - "\tStart size: %,.1f MB (%,d messages)%n" - .format(mb(stats.bytesRead), stats.messagesRead) + - "\tEnd size: %,.1f MB (%,d messages)%n" - .format(mb(stats.bytesWritten), stats.messagesWritten) + + "\tStart size: %,.1f MB (%,d messages)%n".format( + mb(stats.bytesRead), + stats.messagesRead) + + "\tEnd size: %,.1f MB (%,d messages)%n".format( + mb(stats.bytesWritten), + stats.messagesWritten) + "\t%.1f%% size reduction (%.1f%% fewer messages)%n".format( 100.0 * (1.0 - stats.bytesWritten.toDouble / stats.bytesRead), 100.0 * (1.0 - stats.messagesWritten.toDouble / stats.messagesRead)) @@ -505,8 +507,9 @@ private[log] class Cleaner(val id: Int, if (shouldRetainMessage(source, map, retainDeletes, entry)) { val convertedMessage = entry.message.toFormatVersion(messageFormatVersion) - ByteBufferMessageSet - .writeMessage(writeBuffer, convertedMessage, entry.offset) + ByteBufferMessageSet.writeMessage(writeBuffer, + convertedMessage, + entry.offset) stats.recopyMessage(size) } messagesRead += 1 @@ -536,8 +539,9 @@ private[log] class Cleaner(val id: Int, // There are no messages compacted out and no message format conversion, write the original message set back if (writeOriginalMessageSet) - ByteBufferMessageSet - .writeMessage(writeBuffer, entry.message, entry.offset) + ByteBufferMessageSet.writeMessage(writeBuffer, + entry.message, + entry.offset) else if (retainedMessages.nonEmpty) compressMessages(writeBuffer, entry.message.compressionCodec, @@ -570,8 +574,9 @@ private[log] class Cleaner(val id: Int, MessageSet.Empty.sizeInBytes } else if (compressionCodec == NoCompressionCodec) { for (messageOffset <- messageAndOffsets) - ByteBufferMessageSet - .writeMessage(buffer, messageOffset.message, messageOffset.offset) + ByteBufferMessageSet.writeMessage(buffer, + messageOffset.message, + messageOffset.offset) MessageSet.messageSetSize(messages) } else { val magicAndTimestamp = MessageSet.magicAndLargestTimestamp(messages) diff --git a/repos/kafka/core/src/main/scala/kafka/log/OffsetMap.scala b/repos/kafka/core/src/main/scala/kafka/log/OffsetMap.scala index 6fda9b4634e..0ca2625d0fd 100755 --- a/repos/kafka/core/src/main/scala/kafka/log/OffsetMap.scala +++ b/repos/kafka/core/src/main/scala/kafka/log/OffsetMap.scala @@ -164,8 +164,9 @@ class SkimpyOffsetMap(val memory: Int, val hashAlgorithm: String = "MD5") */ private def positionOf(hash: Array[Byte], attempt: Int): Int = { val probe = - CoreUtils.readInt(hash, math.min(attempt, hashSize - 4)) + math - .max(0, attempt - hashSize + 4) + CoreUtils.readInt(hash, math.min(attempt, hashSize - 4)) + math.max( + 0, + attempt - hashSize + 4) val slot = Utils.abs(probe) % slots this.probes += 1 slot * bytesPerEntry diff --git a/repos/kafka/core/src/main/scala/kafka/network/BlockingChannel.scala b/repos/kafka/core/src/main/scala/kafka/network/BlockingChannel.scala index 79b5dbb0486..41122d2c9c1 100644 --- a/repos/kafka/core/src/main/scala/kafka/network/BlockingChannel.scala +++ b/repos/kafka/core/src/main/scala/kafka/network/BlockingChannel.scala @@ -58,8 +58,8 @@ class BlockingChannel(val host: String, channel.socket.setSoTimeout(readTimeoutMs) channel.socket.setKeepAlive(true) channel.socket.setTcpNoDelay(true) - channel.socket - .connect(new InetSocketAddress(host, port), connectTimeoutMs) + channel.socket.connect(new InetSocketAddress(host, port), + connectTimeoutMs) writeChannel = channel // Need to create a new ReadableByteChannel from input stream because SocketChannel doesn't implement read with timeout diff --git 
a/repos/kafka/core/src/main/scala/kafka/network/RequestChannel.scala b/repos/kafka/core/src/main/scala/kafka/network/RequestChannel.scala index 2f28fbdb88b..cbb6cc66d30 100644 --- a/repos/kafka/core/src/main/scala/kafka/network/RequestChannel.scala +++ b/repos/kafka/core/src/main/scala/kafka/network/RequestChannel.scala @@ -103,8 +103,9 @@ object RequestChannel extends Logging { } else null val body: AbstractRequest = if (requestObj == null) - try AbstractRequest - .getRequest(header.apiKey, header.apiVersion, buffer) + try AbstractRequest.getRequest(header.apiKey, + header.apiVersion, + buffer) catch { case ex: Throwable => throw new InvalidRequestException( diff --git a/repos/kafka/core/src/main/scala/kafka/network/SocketServer.scala b/repos/kafka/core/src/main/scala/kafka/network/SocketServer.scala index 38c32b98068..0df6c75fbc5 100644 --- a/repos/kafka/core/src/main/scala/kafka/network/SocketServer.scala +++ b/repos/kafka/core/src/main/scala/kafka/network/SocketServer.scala @@ -429,8 +429,10 @@ private[kafka] class Processor(val id: Int, private val newConnections = new ConcurrentLinkedQueue[SocketChannel]() private val inflightResponses = mutable.Map[String, RequestChannel.Response]() - private val channelBuilder = ChannelBuilders - .create(protocol, Mode.SERVER, LoginType.SERVER, channelConfigs) + private val channelBuilder = ChannelBuilders.create(protocol, + Mode.SERVER, + LoginType.SERVER, + channelConfigs) private val metricTags = new util.HashMap[String, String]() metricTags.put("networkProcessor", id.toString) diff --git a/repos/kafka/core/src/main/scala/kafka/producer/BrokerPartitionInfo.scala b/repos/kafka/core/src/main/scala/kafka/producer/BrokerPartitionInfo.scala index c01060d34a4..e1aafa0471d 100644 --- a/repos/kafka/core/src/main/scala/kafka/producer/BrokerPartitionInfo.scala +++ b/repos/kafka/core/src/main/scala/kafka/producer/BrokerPartitionInfo.scala @@ -72,8 +72,9 @@ class BrokerPartitionInfo(producerConfig: ProducerConfig, m.leader match { case Some(leader) => debug( - "Partition [%s,%d] has leader %d" - .format(topic, m.partitionId, leader.id)) + "Partition [%s,%d] has leader %d".format(topic, + m.partitionId, + leader.id)) new PartitionAndLeader(topic, m.partitionId, Some(leader.id)) case None => debug( @@ -90,8 +91,10 @@ class BrokerPartitionInfo(producerConfig: ProducerConfig, */ def updateInfo(topics: Set[String], correlationId: Int) { var topicsMetadata: Seq[TopicMetadata] = Nil - val topicMetadataResponse = ClientUtils - .fetchTopicMetadata(topics, brokers, producerConfig, correlationId) + val topicMetadataResponse = ClientUtils.fetchTopicMetadata(topics, + brokers, + producerConfig, + correlationId) topicsMetadata = topicMetadataResponse.topicsMetadata // throw partition specific exception topicsMetadata.foreach(tmd => { diff --git a/repos/kafka/core/src/main/scala/kafka/producer/async/DefaultEventHandler.scala b/repos/kafka/core/src/main/scala/kafka/producer/async/DefaultEventHandler.scala index 80be953ff1d..ac5929e7e41 100755 --- a/repos/kafka/core/src/main/scala/kafka/producer/async/DefaultEventHandler.scala +++ b/repos/kafka/core/src/main/scala/kafka/producer/async/DefaultEventHandler.scala @@ -258,8 +258,9 @@ class DefaultEventHandler[K, V]( private def getPartitionListForTopic( m: KeyedMessage[K, Message]): Seq[PartitionAndLeader] = { - val topicPartitionsList = brokerPartitionInfo - .getBrokerPartitionInfo(m.topic, correlationId.getAndIncrement) + val topicPartitionsList = brokerPartitionInfo.getBrokerPartitionInfo( + m.topic, + 
correlationId.getAndIncrement) debug( "Broker partitions registered for topic: %s are %s".format( m.topic, diff --git a/repos/kafka/core/src/main/scala/kafka/security/auth/Acl.scala b/repos/kafka/core/src/main/scala/kafka/security/auth/Acl.scala index e1aad60d3d1..e530096c503 100644 --- a/repos/kafka/core/src/main/scala/kafka/security/auth/Acl.scala +++ b/repos/kafka/core/src/main/scala/kafka/security/auth/Acl.scala @@ -113,7 +113,10 @@ case class Acl(principal: KafkaPrincipal, } override def toString: String = { - "%s has %s permission for operations: %s from hosts: %s" - .format(principal, permissionType.name, operation, host) + "%s has %s permission for operations: %s from hosts: %s".format( + principal, + permissionType.name, + operation, + host) } } diff --git a/repos/kafka/core/src/main/scala/kafka/security/auth/SimpleAclAuthorizer.scala b/repos/kafka/core/src/main/scala/kafka/security/auth/SimpleAclAuthorizer.scala index 1bb44d42db2..00e06e27494 100644 --- a/repos/kafka/core/src/main/scala/kafka/security/auth/SimpleAclAuthorizer.scala +++ b/repos/kafka/core/src/main/scala/kafka/security/auth/SimpleAclAuthorizer.scala @@ -376,8 +376,9 @@ class SimpleAclAuthorizer extends Authorizer with Logging { data: String, expectedVersion: Int): (Boolean, Int) = { try { - zkUtils - .conditionalUpdatePersistentPathIfExists(path, data, expectedVersion) + zkUtils.conditionalUpdatePersistentPathIfExists(path, + data, + expectedVersion) } catch { case e: ZkNoNodeException => try { diff --git a/repos/kafka/core/src/main/scala/kafka/server/ClientQuotaManager.scala b/repos/kafka/core/src/main/scala/kafka/server/ClientQuotaManager.scala index 8e6caa112af..fd20b334afe 100644 --- a/repos/kafka/core/src/main/scala/kafka/server/ClientQuotaManager.scala +++ b/repos/kafka/core/src/main/scala/kafka/server/ClientQuotaManager.scala @@ -80,8 +80,9 @@ class ClientQuotaManager(private val config: ClientQuotaManagerConfig, private val delayQueueSensor = metrics.sensor(apiKey + "-delayQueue") delayQueueSensor.add( - metrics - .metricName("queue-size", apiKey, "Tracks the size of the delay queue"), + metrics.metricName("queue-size", + apiKey, + "Tracks the size of the delay queue"), new Total()) /** diff --git a/repos/kafka/core/src/main/scala/kafka/server/DelayedProduce.scala b/repos/kafka/core/src/main/scala/kafka/server/DelayedProduce.scala index a9bc8ed65fa..35d77258eac 100644 --- a/repos/kafka/core/src/main/scala/kafka/server/DelayedProduce.scala +++ b/repos/kafka/core/src/main/scala/kafka/server/DelayedProduce.scala @@ -47,8 +47,8 @@ case class ProduceMetadata( produceStatus: Map[TopicPartition, ProducePartitionStatus]) { override def toString = - "[requiredAcks: %d, partitionStatus: %s]" - .format(produceRequiredAcks, produceStatus) + "[requiredAcks: %d, partitionStatus: %s]".format(produceRequiredAcks, + produceStatus) } /** @@ -96,8 +96,9 @@ class DelayedProduce( .format(topicAndPartition, status)) // skip those partitions that have already been satisfied if (status.acksPending) { - val partitionOpt = replicaManager - .getPartition(topicAndPartition.topic, topicAndPartition.partition) + val partitionOpt = + replicaManager.getPartition(topicAndPartition.topic, + topicAndPartition.partition) val (hasEnough, errorCode) = partitionOpt match { case Some(partition) => partition.checkEnoughReplicasReachOffset(status.requiredOffset) diff --git a/repos/kafka/core/src/main/scala/kafka/server/KafkaApis.scala b/repos/kafka/core/src/main/scala/kafka/server/KafkaApis.scala index 630a3fdd710..fba2c14eabb 100644 --- 
a/repos/kafka/core/src/main/scala/kafka/server/KafkaApis.scala +++ b/repos/kafka/core/src/main/scala/kafka/server/KafkaApis.scala @@ -863,8 +863,8 @@ class KafkaApis(val requestChannel: RequestChannel, val aliveBrokers = metadataCache.getAliveBrokers val offsetsTopicReplicationFactor = if (aliveBrokers.nonEmpty) - Math - .min(config.offsetsTopicReplicationFactor.toInt, aliveBrokers.length) + Math.min(config.offsetsTopicReplicationFactor.toInt, + aliveBrokers.length) else config.offsetsTopicReplicationFactor.toInt createTopic(TopicConstants.GROUP_METADATA_TOPIC_NAME, config.offsetsTopicPartitions, diff --git a/repos/kafka/core/src/main/scala/kafka/server/KafkaHealthcheck.scala b/repos/kafka/core/src/main/scala/kafka/server/KafkaHealthcheck.scala index f344fa02b2b..1dfa8dce315 100644 --- a/repos/kafka/core/src/main/scala/kafka/server/KafkaHealthcheck.scala +++ b/repos/kafka/core/src/main/scala/kafka/server/KafkaHealthcheck.scala @@ -66,8 +66,9 @@ class KafkaHealthcheck( // the default host and port are here for compatibility with older client // only PLAINTEXT is supported as default // if the broker doesn't listen on PLAINTEXT protocol, an empty endpoint will be registered and older clients will be unable to connect - val plaintextEndpoint = updatedEndpoints - .getOrElse(SecurityProtocol.PLAINTEXT, new EndPoint(null, -1, null)) + val plaintextEndpoint = updatedEndpoints.getOrElse( + SecurityProtocol.PLAINTEXT, + new EndPoint(null, -1, null)) zkUtils.registerBrokerInZk(brokerId, plaintextEndpoint.host, plaintextEndpoint.port, diff --git a/repos/kafka/core/src/main/scala/kafka/server/KafkaServer.scala b/repos/kafka/core/src/main/scala/kafka/server/KafkaServer.scala index 0d8fa755137..1dc97b5fd67 100755 --- a/repos/kafka/core/src/main/scala/kafka/server/KafkaServer.scala +++ b/repos/kafka/core/src/main/scala/kafka/server/KafkaServer.scala @@ -74,32 +74,32 @@ object KafkaServer { val logProps = new util.HashMap[String, Object]() logProps.put(LogConfig.SegmentBytesProp, kafkaConfig.logSegmentBytes) logProps.put(LogConfig.SegmentMsProp, kafkaConfig.logRollTimeMillis) - logProps - .put(LogConfig.SegmentJitterMsProp, kafkaConfig.logRollTimeJitterMillis) - logProps - .put(LogConfig.SegmentIndexBytesProp, kafkaConfig.logIndexSizeMaxBytes) - logProps - .put(LogConfig.FlushMessagesProp, kafkaConfig.logFlushIntervalMessages) + logProps.put(LogConfig.SegmentJitterMsProp, + kafkaConfig.logRollTimeJitterMillis) + logProps.put(LogConfig.SegmentIndexBytesProp, + kafkaConfig.logIndexSizeMaxBytes) + logProps.put(LogConfig.FlushMessagesProp, + kafkaConfig.logFlushIntervalMessages) logProps.put(LogConfig.FlushMsProp, kafkaConfig.logFlushIntervalMs) logProps.put(LogConfig.RetentionBytesProp, kafkaConfig.logRetentionBytes) logProps.put(LogConfig.RetentionMsProp, kafkaConfig.logRetentionTimeMillis: java.lang.Long) logProps.put(LogConfig.MaxMessageBytesProp, kafkaConfig.messageMaxBytes) - logProps - .put(LogConfig.IndexIntervalBytesProp, kafkaConfig.logIndexIntervalBytes) + logProps.put(LogConfig.IndexIntervalBytesProp, + kafkaConfig.logIndexIntervalBytes) logProps.put(LogConfig.DeleteRetentionMsProp, kafkaConfig.logCleanerDeleteRetentionMs) logProps.put(LogConfig.FileDeleteDelayMsProp, kafkaConfig.logDeleteDelayMs) logProps.put(LogConfig.MinCleanableDirtyRatioProp, kafkaConfig.logCleanerMinCleanRatio) logProps.put(LogConfig.CleanupPolicyProp, kafkaConfig.logCleanupPolicy) - logProps - .put(LogConfig.MinInSyncReplicasProp, kafkaConfig.minInSyncReplicas) + logProps.put(LogConfig.MinInSyncReplicasProp, + 
kafkaConfig.minInSyncReplicas) logProps.put(LogConfig.CompressionTypeProp, kafkaConfig.compressionType) logProps.put(LogConfig.UncleanLeaderElectionEnableProp, kafkaConfig.uncleanLeaderElectionEnable) - logProps - .put(LogConfig.PreAllocateEnableProp, kafkaConfig.logPreAllocateEnable) + logProps.put(LogConfig.PreAllocateEnableProp, + kafkaConfig.logPreAllocateEnable) logProps.put(LogConfig.MessageFormatVersionProp, kafkaConfig.logMessageFormatVersion.version) logProps.put(LogConfig.MessageTimestampTypeProp, diff --git a/repos/kafka/core/src/main/scala/kafka/server/ReplicaManager.scala b/repos/kafka/core/src/main/scala/kafka/server/ReplicaManager.scala index 3f0681c546e..83cfbe93121 100644 --- a/repos/kafka/core/src/main/scala/kafka/server/ReplicaManager.scala +++ b/repos/kafka/core/src/main/scala/kafka/server/ReplicaManager.scala @@ -429,8 +429,8 @@ class ReplicaManager(val config: KafkaConfig, // try to complete the request immediately, otherwise put it into the purgatory // this is because while the delayed produce operation is being created, new // requests may arrive and hence make this operation completable. - delayedProducePurgatory - .tryCompleteElseWatch(delayedProduce, producerRequestKeys) + delayedProducePurgatory.tryCompleteElseWatch(delayedProduce, + producerRequestKeys) } else { // we can respond immediately val produceResponseStatus = @@ -635,8 +635,8 @@ class ReplicaManager(val config: KafkaConfig, // try to complete the request immediately, otherwise put it into the purgatory; // this is because while the delayed fetch operation is being created, new requests // may arrive and hence make this operation completable. - delayedFetchPurgatory - .tryCompleteElseWatch(delayedFetch, delayedFetchKeys) + delayedFetchPurgatory.tryCompleteElseWatch(delayedFetch, + delayedFetchKeys) } } @@ -1181,8 +1181,10 @@ class ReplicaManager(val config: KafkaConfig, case e: Throwable => val errorMsg = ("Error on broker %d while processing LeaderAndIsr request with correlationId %d received from controller %d " + - "epoch %d") - .format(localBrokerId, correlationId, controllerId, epoch) + "epoch %d").format(localBrokerId, + correlationId, + controllerId, + epoch) stateChangeLogger.error(errorMsg, e) // Re-throw the exception for it to be caught in KafkaApis throw e diff --git a/repos/kafka/core/src/main/scala/kafka/tools/ConsoleConsumer.scala b/repos/kafka/core/src/main/scala/kafka/tools/ConsoleConsumer.scala index 55b7e5d1d5a..9a17b0a9bfb 100755 --- a/repos/kafka/core/src/main/scala/kafka/tools/ConsoleConsumer.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/ConsoleConsumer.scala @@ -399,8 +399,8 @@ object ConsoleConsumer extends Logging { csvReporterProps.put("kafka.metrics.reporters", "kafka.metrics.KafkaCSVMetricsReporter") if (options.has(metricsDirectoryOpt)) - csvReporterProps - .put("kafka.csv.metrics.dir", options.valueOf(metricsDirectoryOpt)) + csvReporterProps.put("kafka.csv.metrics.dir", + options.valueOf(metricsDirectoryOpt)) else csvReporterProps.put("kafka.csv.metrics.dir", "kafka_metrics") csvReporterProps.put("kafka.csv.metrics.reporter.enabled", "true") val verifiableProps = new VerifiableProperties(csvReporterProps) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/ConsoleProducer.scala b/repos/kafka/core/src/main/scala/kafka/tools/ConsoleProducer.scala index 7e32547d228..8ff1e7f5cd9 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/ConsoleProducer.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/ConsoleProducer.scala @@ -84,13 +84,13 @@ object 
ConsoleProducer { props.put("compression.codec", config.compressionCodec) props.put("producer.type", if (config.sync) "sync" else "async") props.put("batch.num.messages", config.batchSize.toString) - props - .put("message.send.max.retries", config.messageSendMaxRetries.toString) + props.put("message.send.max.retries", + config.messageSendMaxRetries.toString) props.put("retry.backoff.ms", config.retryBackoffMs.toString) props.put("queue.buffering.max.ms", config.sendTimeout.toString) props.put("queue.buffering.max.messages", config.queueSize.toString) - props - .put("queue.enqueue.timeout.ms", config.queueEnqueueTimeoutMs.toString) + props.put("queue.enqueue.timeout.ms", + config.queueEnqueueTimeoutMs.toString) props.put("request.required.acks", config.requestRequiredAcks.toString) props.put("request.timeout.ms", config.requestTimeoutMs.toString) props.put("key.serializer.class", config.keyEncoderClass) @@ -129,8 +129,8 @@ object ConsoleProducer { props.put(ProducerConfig.RETRIES_CONFIG, config.messageSendMaxRetries.toString) props.put(ProducerConfig.LINGER_MS_CONFIG, config.sendTimeout.toString) - props - .put(ProducerConfig.BUFFER_MEMORY_CONFIG, config.maxMemoryBytes.toString) + props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, + config.maxMemoryBytes.toString) props.put(ProducerConfig.BATCH_SIZE_CONFIG, config.maxPartitionMemoryBytes.toString) props.put(ProducerConfig.CLIENT_ID_CONFIG, "console-producer") @@ -322,8 +322,10 @@ object ConsoleProducer { CommandLineUtils.printUsageAndDie( parser, "Read data from standard input and publish it to Kafka.") - CommandLineUtils - .checkRequiredArgs(parser, options, topicOpt, brokerListOpt) + CommandLineUtils.checkRequiredArgs(parser, + options, + topicOpt, + brokerListOpt) import scala.collection.JavaConversions._ val useOldProducer = options.has(useOldProducerOpt) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala b/repos/kafka/core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala index 6e89ef60ca9..fbf4ea114d3 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala @@ -173,8 +173,8 @@ object ConsumerOffsetChecker extends Logging { parser.accepts("help", "Print this message.") if (args.length == 0) - CommandLineUtils - .printUsageAndDie(parser, "Check the offset of your consumers.") + CommandLineUtils.printUsageAndDie(parser, + "Check the offset of your consumers.") val options = parser.parse(args: _*) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/ConsumerPerformance.scala b/repos/kafka/core/src/main/scala/kafka/tools/ConsumerPerformance.scala index bd7b573726e..3e1b072c310 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/ConsumerPerformance.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/ConsumerPerformance.scala @@ -275,8 +275,10 @@ object ConsumerPerformance { val options = parser.parse(args: _*) - CommandLineUtils - .checkRequiredArgs(parser, options, topicOpt, numMessagesOpt) + CommandLineUtils.checkRequiredArgs(parser, + options, + topicOpt, + numMessagesOpt) val useNewConsumer = options.has(useNewConsumerOpt) @@ -302,20 +304,22 @@ object ConsumerPerformance { classOf[ByteArrayDeserializer]) props.put(ConsumerConfig.CHECK_CRCS_CONFIG, "false") } else { - CommandLineUtils - .checkRequiredArgs(parser, options, zkConnectOpt, numMessagesOpt) + CommandLineUtils.checkRequiredArgs(parser, + options, + zkConnectOpt, + numMessagesOpt) props.put("group.id", 
options.valueOf(groupIdOpt)) props.put("socket.receive.buffer.bytes", options.valueOf(socketBufferSizeOpt).toString) - props - .put("fetch.message.max.bytes", options.valueOf(fetchSizeOpt).toString) + props.put("fetch.message.max.bytes", + options.valueOf(fetchSizeOpt).toString) props.put("auto.offset.reset", if (options.has(resetBeginningOffsetOpt)) "largest" else "smallest") props.put("zookeeper.connect", options.valueOf(zkConnectOpt)) props.put("consumer.timeout.ms", "1000") - props - .put("num.consumer.fetchers", options.valueOf(numFetchersOpt).toString) + props.put("num.consumer.fetchers", + options.valueOf(numFetchersOpt).toString) } val numThreads = options.valueOf(numThreadsOpt).intValue val topic = options.valueOf(topicOpt) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/DumpLogSegments.scala b/repos/kafka/core/src/main/scala/kafka/tools/DumpLogSegments.scala index 949b47e2b63..91cb9ab42ba 100755 --- a/repos/kafka/core/src/main/scala/kafka/tools/DumpLogSegments.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/DumpLogSegments.scala @@ -179,8 +179,9 @@ object DumpLogSegments { val messageAndOffset = getIterator(partialFileMessageSet.head, isDeepIteration = true).next() if (messageAndOffset.offset != entry.offset + index.baseOffset) { - var misMatchesSeq = misMatchesForIndexFilesMap - .getOrElse(file.getAbsolutePath, List[(Long, Long)]()) + var misMatchesSeq = misMatchesForIndexFilesMap.getOrElse( + file.getAbsolutePath, + List[(Long, Long)]()) misMatchesSeq ::= (entry.offset + index.baseOffset, messageAndOffset.offset) misMatchesForIndexFilesMap.put(file.getAbsolutePath, misMatchesSeq) @@ -310,11 +311,11 @@ object DumpLogSegments { else if (msg.compressionCodec == NoCompressionCodec && messageAndOffset.offset != lastOffset + 1) { var nonConsecutivePairsSeq = - nonConsecutivePairsForLogFilesMap - .getOrElse(file.getAbsolutePath, List[(Long, Long)]()) + nonConsecutivePairsForLogFilesMap.getOrElse(file.getAbsolutePath, + List[(Long, Long)]()) nonConsecutivePairsSeq ::= (lastOffset, messageAndOffset.offset) - nonConsecutivePairsForLogFilesMap - .put(file.getAbsolutePath, nonConsecutivePairsSeq) + nonConsecutivePairsForLogFilesMap.put(file.getAbsolutePath, + nonConsecutivePairsSeq) } lastOffset = messageAndOffset.offset diff --git a/repos/kafka/core/src/main/scala/kafka/tools/EndToEndLatency.scala b/repos/kafka/core/src/main/scala/kafka/tools/EndToEndLatency.scala index bdf8794863b..29e48a01969 100755 --- a/repos/kafka/core/src/main/scala/kafka/tools/EndToEndLatency.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/EndToEndLatency.scala @@ -70,8 +70,7 @@ object EndToEndLatency { consumerProps.put( ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer") - consumerProps - .put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "0") //ensure we have no temporal batching + consumerProps.put(ConsumerConfig.FETCH_MAX_WAIT_MS_CONFIG, "0") //ensure we have no temporal batching val consumer = new KafkaConsumer[Array[Byte], Array[Byte]](consumerProps) consumer.subscribe(List(topic)) @@ -80,10 +79,9 @@ object EndToEndLatency { if (sslPropsFile.equals("")) new Properties() else Utils.loadProps(sslPropsFile) producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList) - producerProps - .put(ProducerConfig.LINGER_MS_CONFIG, "0") //ensure writes are synchronous - producerProps - .put(ProducerConfig.MAX_BLOCK_MS_CONFIG, Long.MaxValue.toString) + producerProps.put(ProducerConfig.LINGER_MS_CONFIG, "0") //ensure writes are 
synchronous + producerProps.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, + Long.MaxValue.toString) producerProps.put(ProducerConfig.ACKS_CONFIG, producerAcks.toString) producerProps.put( ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, diff --git a/repos/kafka/core/src/main/scala/kafka/tools/ExportZkOffsets.scala b/repos/kafka/core/src/main/scala/kafka/tools/ExportZkOffsets.scala index 9cc66b75bfa..59c4682ce87 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/ExportZkOffsets.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/ExportZkOffsets.scala @@ -59,8 +59,9 @@ object ExportZkOffsets extends Logging { parser.accepts("help", "Print this message.") if (args.length == 0) - CommandLineUtils - .printUsageAndDie(parser, "Export consumer offsets to an output file.") + CommandLineUtils.printUsageAndDie( + parser, + "Export consumer offsets to an output file.") val options = parser.parse(args: _*) @@ -69,8 +70,10 @@ object ExportZkOffsets extends Logging { System.exit(0) } - CommandLineUtils - .checkRequiredArgs(parser, options, zkConnectOpt, outFileOpt) + CommandLineUtils.checkRequiredArgs(parser, + options, + zkConnectOpt, + outFileOpt) val zkConnect = options.valueOf(zkConnectOpt) val groups = options.valuesOf(groupOpt) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/GetOffsetShell.scala b/repos/kafka/core/src/main/scala/kafka/tools/GetOffsetShell.scala index 8f749ea8b8a..cddb4ece132 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/GetOffsetShell.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/GetOffsetShell.scala @@ -76,8 +76,11 @@ object GetOffsetShell { val options = parser.parse(args: _*) - CommandLineUtils - .checkRequiredArgs(parser, options, brokerListOpt, topicOpt, timeOpt) + CommandLineUtils.checkRequiredArgs(parser, + options, + brokerListOpt, + topicOpt, + timeOpt) val clientId = "GetOffsetShell" val brokerList = options.valueOf(brokerListOpt) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/ImportZkOffsets.scala b/repos/kafka/core/src/main/scala/kafka/tools/ImportZkOffsets.scala index 0cd3c648425..652032c51c3 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/ImportZkOffsets.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/ImportZkOffsets.scala @@ -55,8 +55,9 @@ object ImportZkOffsets extends Logging { parser.accepts("help", "Print this message.") if (args.length == 0) - CommandLineUtils - .printUsageAndDie(parser, "Import offsets to zookeeper from files.") + CommandLineUtils.printUsageAndDie( + parser, + "Import offsets to zookeeper from files.") val options = parser.parse(args: _*) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/JmxTool.scala b/repos/kafka/core/src/main/scala/kafka/tools/JmxTool.scala index 69dce8b5d8f..03989bc4521 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/JmxTool.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/JmxTool.scala @@ -75,8 +75,8 @@ object JmxTool extends Logging { .defaultsTo("service:jmx:rmi:///jndi/rmi://:9999/jmxrmi") if (args.length == 0) - CommandLineUtils - .printUsageAndDie(parser, "Dump JMX values to standard output.") + CommandLineUtils.printUsageAndDie(parser, + "Dump JMX values to standard output.") val options = parser.parse(args: _*) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/MirrorMaker.scala b/repos/kafka/core/src/main/scala/kafka/tools/MirrorMaker.scala index 1d33cae97e0..4eba8da5892 100755 --- a/repos/kafka/core/src/main/scala/kafka/tools/MirrorMaker.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/MirrorMaker.scala @@ -414,8 +414,8 @@ 
object MirrorMaker extends Logging with KafkaMetricsGroup { // Disable consumer auto offsets commit to prevent data loss. maybeSetDefaultProperty(consumerConfigProps, "enable.auto.commit", "false") // Hardcode the deserializer to ByteArrayDeserializer - consumerConfigProps - .setProperty("key.deserializer", classOf[ByteArrayDeserializer].getName) + consumerConfigProps.setProperty("key.deserializer", + classOf[ByteArrayDeserializer].getName) consumerConfigProps.setProperty("value.deserializer", classOf[ByteArrayDeserializer].getName) // The default client id is group id, we manually set client id to groupId-index to avoid metric collision @@ -470,8 +470,8 @@ object MirrorMaker extends Logging with KafkaMetricsGroup { propertyName: String, defaultValue: String) { val propertyValue = properties.getProperty(propertyName) - properties - .setProperty(propertyName, Option(propertyValue).getOrElse(defaultValue)) + properties.setProperty(propertyName, + Option(propertyValue).getOrElse(defaultValue)) if (properties.getProperty(propertyName) != defaultValue) info( "Property %s is overridden to %s - data loss or message reordering is possible." diff --git a/repos/kafka/core/src/main/scala/kafka/tools/PerfConfig.scala b/repos/kafka/core/src/main/scala/kafka/tools/PerfConfig.scala index 2803dc0743a..64150e13034 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/PerfConfig.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/PerfConfig.scala @@ -43,8 +43,9 @@ class PerfConfig(args: Array[String]) { "show-detailed-stats", "If set, stats are reported for each reporting " + "interval as configured by reporting-interval") - val hideHeaderOpt = parser - .accepts("hide-header", "If set, skips printing the header for the stats ") + val hideHeaderOpt = parser.accepts( + "hide-header", + "If set, skips printing the header for the stats ") val messageSizeOpt = parser .accepts("message-size", "The size of each message.") .withRequiredArg diff --git a/repos/kafka/core/src/main/scala/kafka/tools/ProducerPerformance.scala b/repos/kafka/core/src/main/scala/kafka/tools/ProducerPerformance.scala index b7fbfb7b8d0..1be78b4300c 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/ProducerPerformance.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/ProducerPerformance.scala @@ -221,8 +221,8 @@ object ProducerPerformance extends Logging { props.put("kafka.metrics.reporters", "kafka.metrics.KafkaCSVMetricsReporter") if (options.has(metricsDirectoryOpt)) - props - .put("kafka.csv.metrics.dir", options.valueOf(metricsDirectoryOpt)) + props.put("kafka.csv.metrics.dir", + options.valueOf(metricsDirectoryOpt)) else props.put("kafka.csv.metrics.dir", "kafka_metrics") props.put("kafka.csv.metrics.reporter.enabled", "true") val verifiableProps = new VerifiableProperties(props) @@ -277,10 +277,10 @@ object ProducerPerformance extends Logging { props.put("client.id", "producer-performance") props.put("request.required.acks", config.producerRequestRequiredAcks.toString) - props - .put("request.timeout.ms", config.producerRequestTimeoutMs.toString) - props - .put("message.send.max.retries", config.producerNumRetries.toString) + props.put("request.timeout.ms", + config.producerRequestTimeoutMs.toString) + props.put("message.send.max.retries", + config.producerNumRetries.toString) props.put("retry.backoff.ms", config.producerRetryBackoffMs.toString) props.put("serializer.class", classOf[DefaultEncoder].getName) props.put("key.serializer.class", classOf[NullEncoder[Long]].getName) diff --git 
a/repos/kafka/core/src/main/scala/kafka/tools/ReplayLogProducer.scala b/repos/kafka/core/src/main/scala/kafka/tools/ReplayLogProducer.scala index 77bd10767ff..8b82918a74a 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/ReplayLogProducer.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/ReplayLogProducer.scala @@ -49,8 +49,8 @@ object ReplayLogProducer extends Logging { consumerProps.put("consumer.timeout.ms", "10000") consumerProps.put("auto.offset.reset", OffsetRequest.SmallestTimeString) consumerProps.put("fetch.message.max.bytes", (1024 * 1024).toString) - consumerProps - .put("socket.receive.buffer.bytes", (2 * 1024 * 1024).toString) + consumerProps.put("socket.receive.buffer.bytes", + (2 * 1024 * 1024).toString) val consumerConfig = new ConsumerConfig(consumerProps) val consumerConnector: ConsumerConnector = Consumer.create(consumerConfig) val topicMessageStreams = consumerConnector.createMessageStreams( @@ -126,8 +126,10 @@ object ReplayLogProducer extends Logging { val options = parser.parse(args: _*) - CommandLineUtils - .checkRequiredArgs(parser, options, brokerListOpt, inputTopicOpt) + CommandLineUtils.checkRequiredArgs(parser, + options, + brokerListOpt, + inputTopicOpt) val zkConnect = options.valueOf(zkConnectOpt) val brokerList = options.valueOf(brokerListOpt) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala b/repos/kafka/core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala index 25214c84232..fbf48d20ddb 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala @@ -279,8 +279,8 @@ private class ReplicaBuffer( private def initialize() { for (topicAndPartition <- expectedReplicasPerTopicAndPartition.keySet) - messageSetCache - .put(topicAndPartition, new Pool[Int, FetchResponsePartitionData]) + messageSetCache.put(topicAndPartition, + new Pool[Int, FetchResponsePartitionData]) setInitialOffsets() } @@ -479,8 +479,9 @@ private class ReplicaFetcher(name: String, if (response != null) { response.data.foreach { case (topicAndPartition, partitionData) => - replicaBuffer - .addFetchedData(topicAndPartition, sourceBroker.id, partitionData) + replicaBuffer.addFetchedData(topicAndPartition, + sourceBroker.id, + partitionData) } } else { for (topicAndPartition <- topicAndPartitions) diff --git a/repos/kafka/core/src/main/scala/kafka/tools/SimpleConsumerPerformance.scala b/repos/kafka/core/src/main/scala/kafka/tools/SimpleConsumerPerformance.scala index 0d8315a78ce..79bcccfa4c1 100644 --- a/repos/kafka/core/src/main/scala/kafka/tools/SimpleConsumerPerformance.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/SimpleConsumerPerformance.scala @@ -180,8 +180,11 @@ object SimpleConsumerPerformance { val options = parser.parse(args: _*) - CommandLineUtils - .checkRequiredArgs(parser, options, topicOpt, urlOpt, numMessagesOpt) + CommandLineUtils.checkRequiredArgs(parser, + options, + topicOpt, + urlOpt, + numMessagesOpt) val url = new URI(options.valueOf(urlOpt)) val fetchSize = options.valueOf(fetchSizeOpt).intValue diff --git a/repos/kafka/core/src/main/scala/kafka/tools/SimpleConsumerShell.scala b/repos/kafka/core/src/main/scala/kafka/tools/SimpleConsumerShell.scala index 2dd5b954ee3..1437f93702d 100755 --- a/repos/kafka/core/src/main/scala/kafka/tools/SimpleConsumerShell.scala +++ b/repos/kafka/core/src/main/scala/kafka/tools/SimpleConsumerShell.scala @@ -97,8 +97,9 @@ object SimpleConsumerShell extends Logging { 
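
The hunk that continues below also shows the fallback shape. When aligning the arguments under the first one would overflow the column limit, the formatter instead breaks immediately after the opening parenthesis, placing one argument per line at a fixed indent. A condensed sketch of the two output shapes (the option-string text is taken from the surrounding hunk; treat the exact widths as illustrative):

    // Fits when aligned under the first argument:
    parser.accepts("print-offsets",
                   "Print the offsets returned by the iterator")

    // Too wide to align under the first argument, so the break moves
    // to the opening parenthesis instead:
    val printOffsetOpt = parser.accepts(
      "print-offsets",
      "Print the offsets returned by the iterator")

Which shape is chosen is purely a line-length decision; both avoid the receiver/dot break that this patch removes everywhere.
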
.withRequiredArg .describedAs("prop") .ofType(classOf[String]) - val printOffsetOpt = parser - .accepts("print-offsets", "Print the offsets returned by the iterator") + val printOffsetOpt = parser.accepts( + "print-offsets", + "Print the offsets returned by the iterator") val maxWaitMsOpt = parser .accepts("max-wait-ms", "The max amount of time each fetch request waits.") diff --git a/repos/kafka/core/src/main/scala/kafka/utils/DelayedItem.scala b/repos/kafka/core/src/main/scala/kafka/utils/DelayedItem.scala index ef91c30b90d..3bc1c407596 100644 --- a/repos/kafka/core/src/main/scala/kafka/utils/DelayedItem.scala +++ b/repos/kafka/core/src/main/scala/kafka/utils/DelayedItem.scala @@ -29,8 +29,8 @@ class DelayedItem(delayMs: Long) extends Delayed with Logging { * The remaining delay time */ def getDelay(unit: TimeUnit): Long = { - unit - .convert(max(dueMs - SystemTime.milliseconds, 0), TimeUnit.MILLISECONDS) + unit.convert(max(dueMs - SystemTime.milliseconds, 0), + TimeUnit.MILLISECONDS) } def compareTo(d: Delayed): Int = { diff --git a/repos/kafka/core/src/main/scala/kafka/utils/ZkUtils.scala b/repos/kafka/core/src/main/scala/kafka/utils/ZkUtils.scala index 9d36992101a..54a33442914 100644 --- a/repos/kafka/core/src/main/scala/kafka/utils/ZkUtils.scala +++ b/repos/kafka/core/src/main/scala/kafka/utils/ZkUtils.scala @@ -408,8 +408,7 @@ class ZkUtils(val zkClient: ZkClient, } else acls if (!zkClient.exists(path)) - ZkPath - .createPersistent(zkClient, path, true, acl) //won't throw NoNodeException or NodeExistsException + ZkPath.createPersistent(zkClient, path, true, acl) //won't throw NoNodeException or NodeExistsException } /** @@ -912,8 +911,10 @@ class ZkUtils(val zkClient: ZkClient, val consumersPerTopicMap = new mutable.HashMap[String, List[ConsumerThreadId]] for (consumer <- consumers) { - val topicCount = TopicCount - .constructTopicCount(group, consumer, this, excludeInternalTopics) + val topicCount = TopicCount.constructTopicCount(group, + consumer, + this, + excludeInternalTopics) for ((topic, consumerThreadIdSet) <- topicCount.getConsumerThreadIdsPerTopic) { for (consumerThreadId <- consumerThreadIdSet) consumersPerTopicMap.get(topic) match { diff --git a/repos/kafka/core/src/test/scala/integration/kafka/api/BaseProducerSendTest.scala b/repos/kafka/core/src/test/scala/integration/kafka/api/BaseProducerSendTest.scala index 2a9c465666d..9eeabcfa30b 100644 --- a/repos/kafka/core/src/test/scala/integration/kafka/api/BaseProducerSendTest.scala +++ b/repos/kafka/core/src/test/scala/integration/kafka/api/BaseProducerSendTest.scala @@ -280,11 +280,11 @@ abstract class BaseProducerSendTest extends KafkaServerTestHarness { // create topic val topicProps = new Properties() if (timestampType == TimestampType.LOG_APPEND_TIME) - topicProps - .setProperty(LogConfig.MessageTimestampTypeProp, "LogAppendTime") + topicProps.setProperty(LogConfig.MessageTimestampTypeProp, + "LogAppendTime") else - topicProps - .setProperty(LogConfig.MessageTimestampTypeProp, "CreateTime") + topicProps.setProperty(LogConfig.MessageTimestampTypeProp, + "CreateTime") TestUtils.createTopic(zkUtils, topic, 1, 2, servers, topicProps) for (i <- 1 to numRecords) { @@ -585,8 +585,8 @@ abstract class BaseProducerSendTest extends KafkaServerTestHarness { @Test def testSendWithInvalidCreateTime() { val topicProps = new Properties() - topicProps - .setProperty(LogConfig.MessageTimestampDifferenceMaxMsProp, "1000"); + topicProps.setProperty(LogConfig.MessageTimestampDifferenceMaxMsProp, + "1000"); TestUtils.createTopic(zkUtils, 
topic, 1, 2, servers, topicProps) val producer = createProducer(brokerList = brokerList) diff --git a/repos/kafka/core/src/test/scala/integration/kafka/api/EndToEndAuthorizationTest.scala b/repos/kafka/core/src/test/scala/integration/kafka/api/EndToEndAuthorizationTest.scala index 84d54225842..8a5830020d4 100644 --- a/repos/kafka/core/src/test/scala/integration/kafka/api/EndToEndAuthorizationTest.scala +++ b/repos/kafka/core/src/test/scala/integration/kafka/api/EndToEndAuthorizationTest.scala @@ -222,8 +222,9 @@ trait EndToEndAuthorizationTest extends IntegrationTestHarness with SaslSetup { TopicReadAcl ++ TopicWriteAcl ++ TopicDescribeAcl, s.apis.authorizer.get, topicResource) - TestUtils - .waitAndVerifyAcls(GroupReadAcl, s.apis.authorizer.get, groupResource) + TestUtils.waitAndVerifyAcls(GroupReadAcl, + s.apis.authorizer.get, + groupResource) }) //Produce records debug("Starting to send records") diff --git a/repos/kafka/core/src/test/scala/integration/kafka/api/PlaintextConsumerTest.scala b/repos/kafka/core/src/test/scala/integration/kafka/api/PlaintextConsumerTest.scala index 070b3a24d18..996b885022b 100644 --- a/repos/kafka/core/src/test/scala/integration/kafka/api/PlaintextConsumerTest.scala +++ b/repos/kafka/core/src/test/scala/integration/kafka/api/PlaintextConsumerTest.scala @@ -277,8 +277,11 @@ class PlaintextConsumerTest extends BaseConsumerTest { }, s"Expected partitions ${subscriptions.asJava} but actually got ${this.consumers(0).assignment}") - TestUtils - .createTopic(this.zkUtils, otherTopic, 2, serverCount, this.servers) + TestUtils.createTopic(this.zkUtils, + otherTopic, + 2, + serverCount, + this.servers) this.consumers(0).subscribe(List(topic, otherTopic).asJava) TestUtils.waitUntilTrue( () => { @@ -291,8 +294,11 @@ class PlaintextConsumerTest extends BaseConsumerTest { @Test def testShrinkingTopicSubscriptions() { val otherTopic = "other" - TestUtils - .createTopic(this.zkUtils, otherTopic, 2, serverCount, this.servers) + TestUtils.createTopic(this.zkUtils, + otherTopic, + 2, + serverCount, + this.servers) val subscriptions = Set(new TopicPartition(topic, 0), new TopicPartition(topic, 1), new TopicPartition(otherTopic, 0), @@ -392,8 +398,8 @@ class PlaintextConsumerTest extends BaseConsumerTest { val producerProps = new Properties() producerProps.setProperty(ProducerConfig.COMPRESSION_TYPE_CONFIG, CompressionType.GZIP.name) - producerProps - .setProperty(ProducerConfig.LINGER_MS_CONFIG, Long.MaxValue.toString) + producerProps.setProperty(ProducerConfig.LINGER_MS_CONFIG, + Long.MaxValue.toString) val producer = TestUtils.createNewProducer(brokerList, securityProtocol = securityProtocol, diff --git a/repos/kafka/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala b/repos/kafka/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala index 3b4ede4cd34..df78620b614 100644 --- a/repos/kafka/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala +++ b/repos/kafka/core/src/test/scala/integration/kafka/api/ProducerBounceTest.scala @@ -31,8 +31,8 @@ class ProducerBounceTest extends KafkaServerTestHarness { val overridingProps = new Properties() overridingProps.put(KafkaConfig.AutoCreateTopicsEnableProp, false.toString) - overridingProps - .put(KafkaConfig.MessageMaxBytesProp, serverMessageMaxBytes.toString) + overridingProps.put(KafkaConfig.MessageMaxBytesProp, + serverMessageMaxBytes.toString) // Set a smaller value for the number of partitions for the offset commit topic (__consumer_offset topic) // so that the creation of that 
topic/partition(s) and subsequent leader assignment doesn't take relatively long overridingProps.put(KafkaConfig.OffsetsTopicPartitionsProp, 1.toString) @@ -69,10 +69,12 @@ class ProducerBounceTest extends KafkaServerTestHarness { override def setUp() { super.setUp() - producer1 = TestUtils - .createNewProducer(brokerList, acks = 0, bufferSize = producerBufferSize) - producer2 = TestUtils - .createNewProducer(brokerList, acks = 1, bufferSize = producerBufferSize) + producer1 = TestUtils.createNewProducer(brokerList, + acks = 0, + bufferSize = producerBufferSize) + producer2 = TestUtils.createNewProducer(brokerList, + acks = 1, + bufferSize = producerBufferSize) producer3 = TestUtils.createNewProducer(brokerList, acks = -1, bufferSize = producerBufferSize) @@ -94,8 +96,11 @@ class ProducerBounceTest extends KafkaServerTestHarness { @Test def testBrokerFailure() { val numPartitions = 3 - val leaders = TestUtils - .createTopic(zkUtils, topic1, numPartitions, numServers, servers) + val leaders = TestUtils.createTopic(zkUtils, + topic1, + numPartitions, + numServers, + servers) assertTrue("Leader of all partitions of the topic should exist", leaders.values.forall(leader => leader.isDefined)) diff --git a/repos/kafka/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala b/repos/kafka/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala index ca636c619fe..9fb795ee4cc 100644 --- a/repos/kafka/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala +++ b/repos/kafka/core/src/test/scala/integration/kafka/api/ProducerFailureHandlingTest.scala @@ -43,8 +43,8 @@ class ProducerFailureHandlingTest extends KafkaServerTestHarness { val overridingProps = new Properties() overridingProps.put(KafkaConfig.AutoCreateTopicsEnableProp, false.toString) - overridingProps - .put(KafkaConfig.MessageMaxBytesProp, serverMessageMaxBytes.toString) + overridingProps.put(KafkaConfig.MessageMaxBytesProp, + serverMessageMaxBytes.toString) // Set a smaller value for the number of partitions for the offset commit topic (__consumer_offset topic) // so that the creation of that topic/partition(s) and subsequent leader assignment doesn't take relatively long overridingProps.put(KafkaConfig.OffsetsTopicPartitionsProp, 1.toString) @@ -260,8 +260,12 @@ class ProducerFailureHandlingTest extends KafkaServerTestHarness { val topicProps = new Properties() topicProps.put("min.insync.replicas", (numServers + 1).toString) - TestUtils - .createTopic(zkUtils, topicName, 1, numServers, servers, topicProps) + TestUtils.createTopic(zkUtils, + topicName, + 1, + numServers, + servers, + topicProps) val record = new ProducerRecord[Array[Byte], Array[Byte]](topicName, null, @@ -286,8 +290,12 @@ class ProducerFailureHandlingTest extends KafkaServerTestHarness { val topicProps = new Properties() topicProps.put("min.insync.replicas", numServers.toString) - TestUtils - .createTopic(zkUtils, topicName, 1, numServers, servers, topicProps) + TestUtils.createTopic(zkUtils, + topicName, + 1, + numServers, + servers, + topicProps) val record = new ProducerRecord[Array[Byte], Array[Byte]](topicName, null, diff --git a/repos/kafka/core/src/test/scala/integration/kafka/api/QuotasTest.scala b/repos/kafka/core/src/test/scala/integration/kafka/api/QuotasTest.scala index bd3cf7e060f..9e830ff9266 100644 --- a/repos/kafka/core/src/test/scala/integration/kafka/api/QuotasTest.scala +++ b/repos/kafka/core/src/test/scala/integration/kafka/api/QuotasTest.scala @@ -51,10 +51,10 @@ class QuotasTest extends 
KafkaServerTestHarness { val overridingProps = new Properties() // Low enough quota that a producer sending a small payload in a tight loop should get throttled - overridingProps - .put(KafkaConfig.ProducerQuotaBytesPerSecondDefaultProp, "8000") - overridingProps - .put(KafkaConfig.ConsumerQuotaBytesPerSecondDefaultProp, "2500") + overridingProps.put(KafkaConfig.ProducerQuotaBytesPerSecondDefaultProp, + "8000") + overridingProps.put(KafkaConfig.ConsumerQuotaBytesPerSecondDefaultProp, + "2500") override def generateConfigs() = { FixedPortTestUtils @@ -78,8 +78,8 @@ class QuotasTest extends KafkaServerTestHarness { val producerProps = new Properties() producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList) producerProps.put(ProducerConfig.ACKS_CONFIG, "0") - producerProps - .put(ProducerConfig.BUFFER_MEMORY_CONFIG, producerBufferSize.toString) + producerProps.put(ProducerConfig.BUFFER_MEMORY_CONFIG, + producerBufferSize.toString) producerProps.put(ProducerConfig.CLIENT_ID_CONFIG, producerId1) producerProps.put( ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, @@ -93,8 +93,11 @@ class QuotasTest extends KafkaServerTestHarness { producers += new KafkaProducer[Array[Byte], Array[Byte]](producerProps) val numPartitions = 1 - val leaders = TestUtils - .createTopic(zkUtils, topic1, numPartitions, numServers, servers) + val leaders = TestUtils.createTopic(zkUtils, + topic1, + numPartitions, + numServers, + servers) leaderNode = if (leaders(0).get == servers.head.config.brokerId) servers.head @@ -111,8 +114,8 @@ class QuotasTest extends KafkaServerTestHarness { consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "QuotasTest") consumerProps.setProperty(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 4096.toString) - consumerProps - .setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") + consumerProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, + "earliest") consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList) consumerProps.put( ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, diff --git a/repos/kafka/core/src/test/scala/integration/kafka/api/RackAwareAutoTopicCreationTest.scala b/repos/kafka/core/src/test/scala/integration/kafka/api/RackAwareAutoTopicCreationTest.scala index f436a815641..69ab2bb1621 100644 --- a/repos/kafka/core/src/test/scala/integration/kafka/api/RackAwareAutoTopicCreationTest.scala +++ b/repos/kafka/core/src/test/scala/integration/kafka/api/RackAwareAutoTopicCreationTest.scala @@ -35,8 +35,8 @@ class RackAwareAutoTopicCreationTest val replicationFactor = 2 val overridingProps = new Properties() overridingProps.put(KafkaConfig.NumPartitionsProp, numPartitions.toString) - overridingProps - .put(KafkaConfig.DefaultReplicationFactorProp, replicationFactor.toString) + overridingProps.put(KafkaConfig.DefaultReplicationFactorProp, + replicationFactor.toString) def generateConfigs() = (0 until numServers) map { node => diff --git a/repos/kafka/core/src/test/scala/integration/kafka/api/SaslSetup.scala b/repos/kafka/core/src/test/scala/integration/kafka/api/SaslSetup.scala index 26a8bb4c1fb..2ee74850d05 100644 --- a/repos/kafka/core/src/test/scala/integration/kafka/api/SaslSetup.scala +++ b/repos/kafka/core/src/test/scala/integration/kafka/api/SaslSetup.scala @@ -59,8 +59,8 @@ trait SaslSetup { val (keytabFile, jaasFile) = createKeytabAndJaasFiles(mode) // This will cause a reload of the Configuration singleton when `getConfiguration` is called Configuration.setConfiguration(null) - System - .setProperty(JaasUtils.JAVA_LOGIN_CONFIG_PARAM, 
jaasFile.getAbsolutePath) + System.setProperty(JaasUtils.JAVA_LOGIN_CONFIG_PARAM, + jaasFile.getAbsolutePath) keytabFile } diff --git a/repos/kafka/core/src/test/scala/kafka/tools/TestLogCleaning.scala b/repos/kafka/core/src/test/scala/kafka/tools/TestLogCleaning.scala index 4ea2a53ffbf..6ae338c11c6 100755 --- a/repos/kafka/core/src/test/scala/kafka/tools/TestLogCleaning.scala +++ b/repos/kafka/core/src/test/scala/kafka/tools/TestLogCleaning.scala @@ -108,8 +108,9 @@ object TestLogCleaning { val options = parser.parse(args: _*) if (args.length == 0) - CommandLineUtils - .printUsageAndDie(parser, "An integration test for log cleaning.") + CommandLineUtils.printUsageAndDie( + parser, + "An integration test for log cleaning.") if (options.has(dumpOpt)) { dumpLog(new File(options.valueOf(dumpOpt))) @@ -281,18 +282,18 @@ object TestLogCleaning { dups: Int, percentDeletes: Int): File = { val producerProps = new Properties - producerProps - .setProperty(ProducerConfig.MAX_BLOCK_MS_CONFIG, Long.MaxValue.toString) - producerProps - .setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerUrl) + producerProps.setProperty(ProducerConfig.MAX_BLOCK_MS_CONFIG, + Long.MaxValue.toString) + producerProps.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, + brokerUrl) producerProps.put( ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer") producerProps.put( ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer") - producerProps - .setProperty(ProducerConfig.COMPRESSION_TYPE_CONFIG, compressionType) + producerProps.setProperty(ProducerConfig.COMPRESSION_TYPE_CONFIG, + compressionType) val producer = new KafkaProducer[Array[Byte], Array[Byte]](producerProps) val rand = new Random(1) val keyCount = (messages / dups).toInt diff --git a/repos/kafka/core/src/test/scala/other/kafka/StressTestLog.scala b/repos/kafka/core/src/test/scala/other/kafka/StressTestLog.scala index e87c2b77cd2..4b3611b4647 100755 --- a/repos/kafka/core/src/test/scala/other/kafka/StressTestLog.scala +++ b/repos/kafka/core/src/test/scala/other/kafka/StressTestLog.scala @@ -35,12 +35,12 @@ object StressTestLog { val dir = TestUtils.randomPartitionLogDir(TestUtils.tempDir()) val time = new MockTime val logProprties = new Properties() - logProprties - .put(LogConfig.SegmentBytesProp, 64 * 1024 * 1024: java.lang.Integer) - logProprties - .put(LogConfig.MaxMessageBytesProp, Int.MaxValue: java.lang.Integer) - logProprties - .put(LogConfig.SegmentIndexBytesProp, 1024 * 1024: java.lang.Integer) + logProprties.put(LogConfig.SegmentBytesProp, + 64 * 1024 * 1024: java.lang.Integer) + logProprties.put(LogConfig.MaxMessageBytesProp, + Int.MaxValue: java.lang.Integer) + logProprties.put(LogConfig.SegmentIndexBytesProp, + 1024 * 1024: java.lang.Integer) val log = new Log(dir = dir, config = LogConfig(logProprties), @@ -109,10 +109,10 @@ object StressTestLog { require( first.offset == offset, "We should either read nothing or the message we asked for.") - require( - MessageSet.entrySize(first.message) == read.sizeInBytes, - "Expected %d but got %d." 
- .format(MessageSet.entrySize(first.message), read.sizeInBytes)) + require(MessageSet.entrySize(first.message) == read.sizeInBytes, + "Expected %d but got %d.".format( + MessageSet.entrySize(first.message), + read.sizeInBytes)) offset += 1 } case _ => diff --git a/repos/kafka/core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala b/repos/kafka/core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala index ac94ed94d86..7e0a759049e 100755 --- a/repos/kafka/core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala +++ b/repos/kafka/core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala @@ -92,8 +92,11 @@ object TestLinearWriteSpeed { val options = parser.parse(args: _*) - CommandLineUtils - .checkRequiredArgs(parser, options, bytesOpt, sizeOpt, filesOpt) + CommandLineUtils.checkRequiredArgs(parser, + options, + bytesOpt, + sizeOpt, + filesOpt) var bytesToWrite = options.valueOf(bytesOpt).longValue val bufferSize = options.valueOf(sizeOpt).intValue @@ -133,10 +136,10 @@ object TestLinearWriteSpeed { rand.nextInt(512) * 1024 * 1024 + 64 * 1024 * 1024 // vary size to avoid herd effect val logProperties = new Properties() - logProperties - .put(LogConfig.SegmentBytesProp, segmentSize: java.lang.Integer) - logProperties - .put(LogConfig.FlushMessagesProp, flushInterval: java.lang.Long) + logProperties.put(LogConfig.SegmentBytesProp, + segmentSize: java.lang.Integer) + logProperties.put(LogConfig.FlushMessagesProp, + flushInterval: java.lang.Long) writables(i) = new LogWritable(new File(dir, "kafka-test-" + i), new LogConfig(logProperties), scheduler, diff --git a/repos/kafka/core/src/test/scala/other/kafka/TestOffsetManager.scala b/repos/kafka/core/src/test/scala/other/kafka/TestOffsetManager.scala index 0a44f13c5ec..9bd089eacd0 100644 --- a/repos/kafka/core/src/test/scala/other/kafka/TestOffsetManager.scala +++ b/repos/kafka/core/src/test/scala/other/kafka/TestOffsetManager.scala @@ -168,8 +168,10 @@ object TestOffsetManager { val channel = if (channels.contains(coordinatorId)) channels(coordinatorId) else { - val newChannel = ClientUtils - .channelToOffsetManager(group, zkUtils, SocketTimeoutMs) + val newChannel = ClientUtils.channelToOffsetManager( + group, + zkUtils, + SocketTimeoutMs) channels.put(coordinatorId, newChannel) newChannel } diff --git a/repos/kafka/core/src/test/scala/unit/kafka/admin/AclCommandTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/admin/AclCommandTest.scala index 5b6e2fa3911..739453add58 100644 --- a/repos/kafka/core/src/test/scala/unit/kafka/admin/AclCommandTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/admin/AclCommandTest.scala @@ -67,15 +67,21 @@ class AclCommandTest extends ZooKeeperTestHarness with Logging { ) private val ProducerResourceToAcls = Map[Set[Resource], Set[Acl]]( - TopicResources -> AclCommand - .getAcls(Users, Allow, Set(Write, Describe), Hosts), - Set(Resource.ClusterResource) -> AclCommand - .getAcls(Users, Allow, Set(Create), Hosts) + TopicResources -> AclCommand.getAcls(Users, + Allow, + Set(Write, Describe), + Hosts), + Set(Resource.ClusterResource) -> AclCommand.getAcls(Users, + Allow, + Set(Create), + Hosts) ) private val ConsumerResourceToAcls = Map[Set[Resource], Set[Acl]]( - TopicResources -> AclCommand - .getAcls(Users, Allow, Set(Read, Describe), Hosts), + TopicResources -> AclCommand.getAcls(Users, + Allow, + Set(Read, Describe), + Hosts), GroupResources -> AclCommand.getAcls(Users, Allow, Set(Read), Hosts) ) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/admin/AdminTest.scala 
b/repos/kafka/core/src/test/scala/unit/kafka/admin/AdminTest.scala index f7aa2eb4ee8..3472438d88d 100755 --- a/repos/kafka/core/src/test/scala/unit/kafka/admin/AdminTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/admin/AdminTest.scala @@ -673,8 +673,10 @@ class AdminTest extends ZooKeeperTestHarness with Logging with RackAwareTest { } val partialList = List(0, 1, 2, 3, 5) - val processedMetadatas3 = AdminUtils - .getBrokerMetadatas(zkUtils, RackAwareMode.Enforced, Some(partialList)) + val processedMetadatas3 = AdminUtils.getBrokerMetadatas( + zkUtils, + RackAwareMode.Enforced, + Some(partialList)) assertEquals(partialList, processedMetadatas3.map(_.id)) assertEquals(partialList.map(rackInfo), processedMetadatas3.flatMap(_.rack)) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala index 6c0b5718d35..53c8ba2e1fc 100644 --- a/repos/kafka/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupTest.scala @@ -78,8 +78,9 @@ class DeleteConsumerGroupTest extends KafkaServerTestHarness { fillInConsumerGroupInfo(topic, groupToDelete, "consumer", 0, 10, false) fillInConsumerGroupInfo(topic, otherGroup, "consumer", 0, 10, false) - AdminUtils - .deleteConsumerGroupInfoForTopicInZK(zkUtils, groupToDelete, topic) + AdminUtils.deleteConsumerGroupInfoForTopicInZK(zkUtils, + groupToDelete, + topic) TestUtils.waitUntilTrue( () => !groupDirExists(new ZKGroupDirs(groupToDelete)), @@ -152,8 +153,9 @@ class DeleteConsumerGroupTest extends KafkaServerTestHarness { fillInConsumerGroupInfo(topicToDelete, group, "consumer", 0, 10, true) fillInConsumerGroupInfo(otherTopic, group, "consumer", 0, 10, true) - AdminUtils - .deleteConsumerGroupInfoForTopicInZK(zkUtils, group, topicToDelete) + AdminUtils.deleteConsumerGroupInfoForTopicInZK(zkUtils, + group, + topicToDelete) TestUtils.waitUntilTrue( () => diff --git a/repos/kafka/core/src/test/scala/unit/kafka/admin/RackAwareTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/admin/RackAwareTest.scala index 5392683663d..e8e4a35e1cf 100644 --- a/repos/kafka/core/src/test/scala/unit/kafka/admin/RackAwareTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/admin/RackAwareTest.scala @@ -79,8 +79,9 @@ trait RackAwareTest { brokerId, sys.error( s"No mapping found for $brokerId in `brokerRackMapping`")) - partitionRackMap(partitionId) = rack :: partitionRackMap - .getOrElse(partitionId, List()) + partitionRackMap(partitionId) = rack :: partitionRackMap.getOrElse( + partitionId, + List()) } } ReplicaDistributions(partitionRackMap, leaderCount, partitionCount) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/coordinator/GroupCoordinatorResponseTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/coordinator/GroupCoordinatorResponseTest.scala index 77a8f16ed47..dbf22f2c4b1 100644 --- a/repos/kafka/core/src/test/scala/unit/kafka/coordinator/GroupCoordinatorResponseTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/coordinator/GroupCoordinatorResponseTest.scala @@ -1174,8 +1174,10 @@ class GroupCoordinatorResponseTest extends JUnitSuite { EasyMock.replay(replicaManager) - groupCoordinator - .handleHeartbeat(groupId, consumerId, generationId, responseCallback) + groupCoordinator.handleHeartbeat(groupId, + consumerId, + generationId, + responseCallback) Await.result(responseFuture, Duration(40, TimeUnit.MILLISECONDS)) } diff --git 
a/repos/kafka/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala index 15b8bd8c604..6e2a07a665f 100755 --- a/repos/kafka/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala @@ -72,8 +72,8 @@ class UncleanLeaderElectionTest extends ZooKeeperTestHarness { configProps.put("controlled.shutdown.enable", String.valueOf(enableControlledShutdown)) configProps.put("controlled.shutdown.max.retries", String.valueOf(1)) - configProps - .put("controlled.shutdown.retry.backoff.ms", String.valueOf(1000)) + configProps.put("controlled.shutdown.retry.backoff.ms", + String.valueOf(1000)) } // temporarily set loggers to a higher level so that tests run quietly diff --git a/repos/kafka/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala index ade5d457126..a045b771a00 100755 --- a/repos/kafka/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala @@ -167,10 +167,10 @@ class LogCleanerIntegrationTest(compressionCodec: String) { dir.mkdirs() val logProps = new Properties() logProps.put(LogConfig.SegmentBytesProp, segmentSize: java.lang.Integer) - logProps - .put(LogConfig.SegmentIndexBytesProp, 100 * 1024: java.lang.Integer) - logProps - .put(LogConfig.FileDeleteDelayMsProp, deleteDelay: java.lang.Integer) + logProps.put(LogConfig.SegmentIndexBytesProp, + 100 * 1024: java.lang.Integer) + logProps.put(LogConfig.FileDeleteDelayMsProp, + deleteDelay: java.lang.Integer) logProps.put(LogConfig.CleanupPolicyProp, LogConfig.Compact) logProps.put(LogConfig.MinCleanableDirtyRatioProp, minCleanableDirtyRatio: java.lang.Float) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/log/LogTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/log/LogTest.scala index 797e12313c0..fe5f199c605 100755 --- a/repos/kafka/core/src/test/scala/unit/kafka/log/LogTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/log/LogTest.scala @@ -456,8 +456,8 @@ class LogTest extends JUnitSuite { // append messages to log val configSegmentSize = messageSet.sizeInBytes - 1 val logProps = new Properties() - logProps - .put(LogConfig.SegmentBytesProp, configSegmentSize: java.lang.Integer) + logProps.put(LogConfig.SegmentBytesProp, + configSegmentSize: java.lang.Integer) // We use need to use magic value 1 here because the test is message size sensitive. 
logProps.put(LogConfig.MessageFormatVersionProp, ApiVersion.latestVersion.toString) @@ -559,8 +559,8 @@ class LogTest extends JUnitSuite { // append messages to log val maxMessageSize = second.sizeInBytes - 1 val logProps = new Properties() - logProps - .put(LogConfig.MaxMessageBytesProp, maxMessageSize: java.lang.Integer) + logProps.put(LogConfig.MaxMessageBytesProp, + maxMessageSize: java.lang.Integer) val log = new Log(logDir, LogConfig(logProps), recoveryPoint = 0L, @@ -589,8 +589,8 @@ class LogTest extends JUnitSuite { val indexInterval = 3 * messageSize val logProps = new Properties() logProps.put(LogConfig.SegmentBytesProp, segmentSize: java.lang.Integer) - logProps - .put(LogConfig.IndexIntervalBytesProp, indexInterval: java.lang.Integer) + logProps.put(LogConfig.IndexIntervalBytesProp, + indexInterval: java.lang.Integer) logProps.put(LogConfig.SegmentIndexBytesProp, 4096: java.lang.Integer) val config = LogConfig(logProps) var log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time) @@ -819,8 +819,8 @@ class LogTest extends JUnitSuite { val set = TestUtils.singleMessageSet("test".getBytes) val logProps = new Properties() - logProps - .put(LogConfig.SegmentBytesProp, set.sizeInBytes * 5: java.lang.Integer) + logProps.put(LogConfig.SegmentBytesProp, + set.sizeInBytes * 5: java.lang.Integer) logProps.put(LogConfig.SegmentIndexBytesProp, 1000: java.lang.Integer) logProps.put(LogConfig.IndexIntervalBytesProp, 1: java.lang.Integer) val log = new Log(logDir, @@ -848,8 +848,8 @@ class LogTest extends JUnitSuite { def testReopenThenTruncate() { val set = TestUtils.singleMessageSet("test".getBytes) val logProps = new Properties() - logProps - .put(LogConfig.SegmentBytesProp, set.sizeInBytes * 5: java.lang.Integer) + logProps.put(LogConfig.SegmentBytesProp, + set.sizeInBytes * 5: java.lang.Integer) logProps.put(LogConfig.SegmentIndexBytesProp, 1000: java.lang.Integer) logProps.put(LogConfig.IndexIntervalBytesProp, 10000: java.lang.Integer) val config = LogConfig(logProps) @@ -876,12 +876,12 @@ class LogTest extends JUnitSuite { val set = TestUtils.singleMessageSet("test".getBytes) val asyncDeleteMs = 1000 val logProps = new Properties() - logProps - .put(LogConfig.SegmentBytesProp, set.sizeInBytes * 5: java.lang.Integer) + logProps.put(LogConfig.SegmentBytesProp, + set.sizeInBytes * 5: java.lang.Integer) logProps.put(LogConfig.SegmentIndexBytesProp, 1000: java.lang.Integer) logProps.put(LogConfig.IndexIntervalBytesProp, 10000: java.lang.Integer) - logProps - .put(LogConfig.FileDeleteDelayMsProp, asyncDeleteMs: java.lang.Integer) + logProps.put(LogConfig.FileDeleteDelayMsProp, + asyncDeleteMs: java.lang.Integer) val config = LogConfig(logProps) val log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time) @@ -917,8 +917,8 @@ class LogTest extends JUnitSuite { def testOpenDeletesObsoleteFiles() { val set = TestUtils.singleMessageSet("test".getBytes) val logProps = new Properties() - logProps - .put(LogConfig.SegmentBytesProp, set.sizeInBytes * 5: java.lang.Integer) + logProps.put(LogConfig.SegmentBytesProp, + set.sizeInBytes * 5: java.lang.Integer) logProps.put(LogConfig.SegmentIndexBytesProp, 1000: java.lang.Integer) val config = LogConfig(logProps) var log = new Log(logDir, config, recoveryPoint = 0L, time.scheduler, time) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala index e884c0e3da5..49c8f00bbc0 100755 --- 
a/repos/kafka/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/producer/AsyncProducerTest.scala @@ -304,8 +304,8 @@ class AsyncProducerTest { TestUtils.readString(d.message.payload))) TestUtils.checkEquals(produceData.iterator, deserializedData.iterator) - TestUtils - .checkEquals(produceData.iterator, deserializedStreamData.iterator) + TestUtils.checkEquals(produceData.iterator, + deserializedStreamData.iterator) } @Test @@ -448,8 +448,8 @@ class AsyncProducerTest { props.put("metadata.broker.list", brokerList) props.put("request.required.acks", "1") props.put("serializer.class", classOf[StringEncoder].getName.toString) - props - .put("key.serializer.class", classOf[NullEncoder[Int]].getName.toString) + props.put("key.serializer.class", + classOf[NullEncoder[Int]].getName.toString) props.put("producer.num.retries", 3.toString) val config = new ProducerConfig(props) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/security/auth/SimpleAclAuthorizerTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/security/auth/SimpleAclAuthorizerTest.scala index ffd5d257aa1..20df2b71938 100644 --- a/repos/kafka/core/src/test/scala/unit/kafka/security/auth/SimpleAclAuthorizerTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/security/auth/SimpleAclAuthorizerTest.scala @@ -326,8 +326,9 @@ class SimpleAclAuthorizerTest extends ZooKeeperTestHarness { simpleAclAuthorizer.addAcls(Set(acl1), commonResource) simpleAclAuthorizer.addAcls(Set(acl2), commonResource) - TestUtils - .waitAndVerifyAcls(Set(acl1, acl2), simpleAclAuthorizer, commonResource) + TestUtils.waitAndVerifyAcls(Set(acl1, acl2), + simpleAclAuthorizer, + commonResource) } @Test @@ -344,10 +345,12 @@ class SimpleAclAuthorizerTest extends ZooKeeperTestHarness { simpleAclAuthorizer.addAcls(Set(acl1), commonResource) simpleAclAuthorizer2.addAcls(Set(acl2), commonResource) - TestUtils - .waitAndVerifyAcls(Set(acl1, acl2), simpleAclAuthorizer, commonResource) - TestUtils - .waitAndVerifyAcls(Set(acl1, acl2), simpleAclAuthorizer2, commonResource) + TestUtils.waitAndVerifyAcls(Set(acl1, acl2), + simpleAclAuthorizer, + commonResource) + TestUtils.waitAndVerifyAcls(Set(acl1, acl2), + simpleAclAuthorizer2, + commonResource) val user3 = new KafkaPrincipal(KafkaPrincipal.USER_TYPE, "joe") val acl3 = new Acl(user3, Deny, WildCardHost, Read) @@ -359,10 +362,12 @@ class SimpleAclAuthorizerTest extends ZooKeeperTestHarness { assertTrue("The authorizer should see a value that needs to be deleted", deleted) - TestUtils - .waitAndVerifyAcls(Set(acl1, acl2), simpleAclAuthorizer, commonResource) - TestUtils - .waitAndVerifyAcls(Set(acl1, acl2), simpleAclAuthorizer2, commonResource) + TestUtils.waitAndVerifyAcls(Set(acl1, acl2), + simpleAclAuthorizer, + commonResource) + TestUtils.waitAndVerifyAcls(Set(acl1, acl2), + simpleAclAuthorizer2, + commonResource) } @Test @@ -398,10 +403,12 @@ class SimpleAclAuthorizerTest extends ZooKeeperTestHarness { concurrentFuctions, 15000) - TestUtils - .waitAndVerifyAcls(expectedAcls, simpleAclAuthorizer, commonResource) - TestUtils - .waitAndVerifyAcls(expectedAcls, simpleAclAuthorizer2, commonResource) + TestUtils.waitAndVerifyAcls(expectedAcls, + simpleAclAuthorizer, + commonResource) + TestUtils.waitAndVerifyAcls(expectedAcls, + simpleAclAuthorizer2, + commonResource) } private def changeAclAndVerify(originalAcls: Set[Acl], diff --git a/repos/kafka/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala 
b/repos/kafka/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala index 0b3505fb941..c775102e405 100644 --- a/repos/kafka/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/server/ISRExpirationTest.scala @@ -39,10 +39,10 @@ class IsrExpirationTest { val replicaFetchWaitMaxMs = 100 val overridingProps = new Properties() - overridingProps - .put(KafkaConfig.ReplicaLagTimeMaxMsProp, replicaLagTimeMaxMs.toString) - overridingProps - .put(KafkaConfig.ReplicaFetchWaitMaxMsProp, replicaFetchWaitMaxMs.toString) + overridingProps.put(KafkaConfig.ReplicaLagTimeMaxMsProp, + replicaLagTimeMaxMs.toString) + overridingProps.put(KafkaConfig.ReplicaFetchWaitMaxMsProp, + replicaFetchWaitMaxMs.toString) val configs = TestUtils .createBrokerConfigs(2, TestUtils.MockZkConnect) .map(KafkaConfig.fromProps(_, overridingProps)) @@ -97,8 +97,9 @@ class IsrExpirationTest { -1L, -1, true))) - var partition0OSR = partition0 - .getOutOfSyncReplicas(leaderReplica, configs.head.replicaLagTimeMaxMs) + var partition0OSR = partition0.getOutOfSyncReplicas( + leaderReplica, + configs.head.replicaLagTimeMaxMs) assertEquals("No replica should be out of sync", Set.empty[Int], partition0OSR.map(_.brokerId)) @@ -107,8 +108,9 @@ class IsrExpirationTest { time.sleep(150) // now follower hasn't pulled any data for > replicaMaxLagTimeMs ms. So it is stuck - partition0OSR = partition0 - .getOutOfSyncReplicas(leaderReplica, configs.head.replicaLagTimeMaxMs) + partition0OSR = partition0.getOutOfSyncReplicas( + leaderReplica, + configs.head.replicaLagTimeMaxMs) assertEquals("Replica 1 should be out of sync", Set(configs.last.brokerId), partition0OSR.map(_.brokerId)) @@ -134,8 +136,9 @@ class IsrExpirationTest { // Let enough time pass for the replica to be considered stuck time.sleep(150) - val partition0OSR = partition0 - .getOutOfSyncReplicas(leaderReplica, configs.head.replicaLagTimeMaxMs) + val partition0OSR = partition0.getOutOfSyncReplicas( + leaderReplica, + configs.head.replicaLagTimeMaxMs) assertEquals("Replica 1 should be out of sync", Set(configs.last.brokerId), partition0OSR.map(_.brokerId)) @@ -169,8 +172,9 @@ class IsrExpirationTest { // Simulate 2 fetch requests spanning more than 100 ms which do not read to the end of the log. // The replicas will no longer be in ISR. 
We do 2 fetches because we want to simulate the case where the replica is lagging but is not stuck - var partition0OSR = partition0 - .getOutOfSyncReplicas(leaderReplica, configs.head.replicaLagTimeMaxMs) + var partition0OSR = partition0.getOutOfSyncReplicas( + leaderReplica, + configs.head.replicaLagTimeMaxMs) assertEquals("No replica should be out of sync", Set.empty[Int], partition0OSR.map(_.brokerId)) @@ -185,8 +189,9 @@ class IsrExpirationTest { -1L, -1, false))) - partition0OSR = partition0 - .getOutOfSyncReplicas(leaderReplica, configs.head.replicaLagTimeMaxMs) + partition0OSR = partition0.getOutOfSyncReplicas( + leaderReplica, + configs.head.replicaLagTimeMaxMs) assertEquals("No replica should be out of sync", Set.empty[Int], partition0OSR.map(_.brokerId)) @@ -194,8 +199,9 @@ class IsrExpirationTest { time.sleep(75) // The replicas will no longer be in ISR - partition0OSR = partition0 - .getOutOfSyncReplicas(leaderReplica, configs.head.replicaLagTimeMaxMs) + partition0OSR = partition0.getOutOfSyncReplicas( + leaderReplica, + configs.head.replicaLagTimeMaxMs) assertEquals("Replica 1 should be out of sync", Set(configs.last.brokerId), partition0OSR.map(_.brokerId)) @@ -209,8 +215,9 @@ class IsrExpirationTest { -1L, -1, true))) - partition0OSR = partition0 - .getOutOfSyncReplicas(leaderReplica, configs.head.replicaLagTimeMaxMs) + partition0OSR = partition0.getOutOfSyncReplicas( + leaderReplica, + configs.head.replicaLagTimeMaxMs) assertEquals("No replica should be out of sync", Set.empty[Int], partition0OSR.map(_.brokerId)) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala index 9a8fc4bec21..f8700ea1672 100755 --- a/repos/kafka/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/server/KafkaConfigTest.scala @@ -341,8 +341,8 @@ class KafkaConfigTest { def testUncleanElectionDisabled() { val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181) - props - .put(KafkaConfig.UncleanLeaderElectionEnableProp, String.valueOf(false)) + props.put(KafkaConfig.UncleanLeaderElectionEnableProp, + String.valueOf(false)) val serverConfig = KafkaConfig.fromProps(props) assertEquals(serverConfig.uncleanLeaderElectionEnable, false) @@ -352,8 +352,8 @@ class KafkaConfigTest { def testUncleanElectionEnabled() { val props = TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181) - props - .put(KafkaConfig.UncleanLeaderElectionEnableProp, String.valueOf(true)) + props.put(KafkaConfig.UncleanLeaderElectionEnableProp, + String.valueOf(true)) val serverConfig = KafkaConfig.fromProps(props) assertEquals(serverConfig.uncleanLeaderElectionEnable, true) @@ -458,8 +458,8 @@ class KafkaConfigTest { TestUtils.createBrokerConfig(0, TestUtils.MockZkConnect, port = 8181) props.put(KafkaConfig.ListenersProp, "TRACE://localhost:9091,SSL://localhost:9093") - props - .put(KafkaConfig.AdvertisedListenersProp, "PLAINTEXT://localhost:9092") + props.put(KafkaConfig.AdvertisedListenersProp, + "PLAINTEXT://localhost:9092") intercept[IllegalArgumentException] { KafkaConfig.fromProps(props) } diff --git a/repos/kafka/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala index f417c188706..13163024446 100755 --- a/repos/kafka/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/server/LogOffsetTest.scala 
@@ -177,8 +177,8 @@ class LogOffsetTest extends ZooKeeperTestHarness { AdminUtils.createTopic(zkUtils, topic, 3, 1) val logManager = server.getLogManager - val log = logManager - .createLog(TopicAndPartition(topic, part), logManager.defaultConfig) + val log = logManager.createLog(TopicAndPartition(topic, part), + logManager.defaultConfig) val message = new Message(Integer.toString(42).getBytes()) for (i <- 0 until 20) log.append(new ByteBufferMessageSet(NoCompressionCodec, message)) @@ -188,8 +188,10 @@ class LogOffsetTest extends ZooKeeperTestHarness { time.milliseconds + 30000 // pretend it is the future to avoid race conditions with the fs - val offsets = server.apis - .fetchOffsets(logManager, new TopicPartition(topic, part), now, 15) + val offsets = server.apis.fetchOffsets(logManager, + new TopicPartition(topic, part), + now, + 15) assertEquals(Seq(20L, 18L, 16L, 14L, 12L, 10L, 8L, 6L, 4L, 2L, 0L), offsets) @@ -217,8 +219,8 @@ class LogOffsetTest extends ZooKeeperTestHarness { AdminUtils.createTopic(zkUtils, topic, 3, 1) val logManager = server.getLogManager - val log = logManager - .createLog(TopicAndPartition(topic, part), logManager.defaultConfig) + val log = logManager.createLog(TopicAndPartition(topic, part), + logManager.defaultConfig) val message = new Message(Integer.toString(42).getBytes()) for (i <- 0 until 20) log.append(new ByteBufferMessageSet(NoCompressionCodec, message)) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala index 64cc209e1ec..d09eabf00b3 100755 --- a/repos/kafka/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/server/LogRecoveryTest.scala @@ -40,12 +40,12 @@ class LogRecoveryTest extends ZooKeeperTestHarness { val replicaFetchMinBytes = 20 val overridingProps = new Properties() - overridingProps - .put(KafkaConfig.ReplicaLagTimeMaxMsProp, replicaLagTimeMaxMs.toString) - overridingProps - .put(KafkaConfig.ReplicaFetchWaitMaxMsProp, replicaFetchWaitMaxMs.toString) - overridingProps - .put(KafkaConfig.ReplicaFetchMinBytesProp, replicaFetchMinBytes.toString) + overridingProps.put(KafkaConfig.ReplicaLagTimeMaxMsProp, + replicaLagTimeMaxMs.toString) + overridingProps.put(KafkaConfig.ReplicaFetchWaitMaxMsProp, + replicaFetchWaitMaxMs.toString) + overridingProps.put(KafkaConfig.ReplicaFetchMinBytesProp, + replicaFetchMinBytes.toString) var configs: Seq[KafkaConfig] = null val topic = "new-topic" diff --git a/repos/kafka/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala index ca15f02b2c5..5c1efe4abdc 100644 --- a/repos/kafka/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala @@ -41,10 +41,10 @@ class SimpleFetchTest { val replicaLagMaxMessages = 10L val overridingProps = new Properties() - overridingProps - .put(KafkaConfig.ReplicaLagTimeMaxMsProp, replicaLagTimeMaxMs.toString) - overridingProps - .put(KafkaConfig.ReplicaFetchWaitMaxMsProp, replicaFetchWaitMaxMs.toString) + overridingProps.put(KafkaConfig.ReplicaLagTimeMaxMsProp, + replicaLagTimeMaxMs.toString) + overridingProps.put(KafkaConfig.ReplicaFetchWaitMaxMsProp, + replicaFetchWaitMaxMs.toString) val configs = TestUtils .createBrokerConfigs(2, TestUtils.MockZkConnect) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala 
b/repos/kafka/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala index 3abffb6b953..64c6aef2e46 100644 --- a/repos/kafka/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/utils/ReplicationUtilsTest.scala @@ -135,13 +135,15 @@ class ReplicationUtilsTest extends ZooKeeperTestHarness { @Test def testGetLeaderIsrAndEpochForPartition() { val leaderIsrAndControllerEpoch = - ReplicationUtils - .getLeaderIsrAndEpochForPartition(zkUtils, topic, partitionId) + ReplicationUtils.getLeaderIsrAndEpochForPartition(zkUtils, + topic, + partitionId) assertEquals(topicDataLeaderIsrAndControllerEpoch, leaderIsrAndControllerEpoch.get) assertEquals( None, - ReplicationUtils - .getLeaderIsrAndEpochForPartition(zkUtils, topic, partitionId + 1)) + ReplicationUtils.getLeaderIsrAndEpochForPartition(zkUtils, + topic, + partitionId + 1)) } } diff --git a/repos/kafka/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala b/repos/kafka/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala index a81d8a77b45..9d58a81974a 100644 --- a/repos/kafka/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/utils/SchedulerTest.scala @@ -61,10 +61,14 @@ class SchedulerTest { @Test def testMockSchedulerPeriodicTask() { - mockTime.scheduler - .schedule("test1", counter1.getAndIncrement, delay = 1, period = 1) - mockTime.scheduler - .schedule("test2", counter2.getAndIncrement, delay = 100, period = 100) + mockTime.scheduler.schedule("test1", + counter1.getAndIncrement, + delay = 1, + period = 1) + mockTime.scheduler.schedule("test2", + counter2.getAndIncrement, + delay = 100, + period = 100) assertEquals("Counter1 should not be incremented prior to task running.", 0, counter1.get) @@ -84,8 +88,9 @@ class SchedulerTest { mockTime.scheduler.schedule( "test1", () => - mockTime.scheduler - .schedule("test2", counter2.getAndIncrement, delay = 0), + mockTime.scheduler.schedule("test2", + counter2.getAndIncrement, + delay = 0), delay = 1) mockTime.sleep(1) assertEquals(1, counter2.get) diff --git a/repos/kafka/core/src/test/scala/unit/kafka/utils/TestUtils.scala b/repos/kafka/core/src/test/scala/unit/kafka/utils/TestUtils.scala index bf6f6acf1f3..8558b3cebfb 100755 --- a/repos/kafka/core/src/test/scala/unit/kafka/utils/TestUtils.scala +++ b/repos/kafka/core/src/test/scala/unit/kafka/utils/TestUtils.scala @@ -518,8 +518,8 @@ object TestUtils extends Logging { securityProtocol == SecurityProtocol.SSL, trustStoreFile, certAlias)) - props - .put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol.name) + props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, + securityProtocol.name) props } @@ -550,8 +550,8 @@ object TestUtils extends Logging { producerProps.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, maxBlockMs.toString) producerProps.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferSize.toString) producerProps.put(ProducerConfig.RETRIES_CONFIG, retries.toString) - producerProps - .put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, requestTimeoutMs.toString) + producerProps.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, + requestTimeoutMs.toString) /* Only use these if not already set */ val defaultProps = Map( @@ -954,8 +954,9 @@ object TestUtils extends Logging { result && Request.isValidBrokerId(leader) } }, - "Partition [%s,%d] metadata not propagated after %d ms" - .format(topic, partition, timeout), + "Partition [%s,%d] metadata not propagated after %d ms".format(topic, + partition, + timeout), waitTime = 
timeout) leader @@ -972,8 +973,9 @@ object TestUtils extends Logging { .getPartition(topic, partition) .exists(_.leaderReplicaIfLocal().isDefined) }, - "Partition [%s,%d] leaders not made yet after %d ms" - .format(topic, partition, timeout), + "Partition [%s,%d] leaders not made yet after %d ms".format(topic, + partition, + timeout), waitTime = timeout) } @@ -1267,8 +1269,11 @@ object TestUtils extends Logging { throw new Exception("SSL enabled but no trustStoreFile provided") } - val sslConfigs = TestSslUtils - .createSslConfig(clientCert, true, mode, trustStore, certAlias) + val sslConfigs = TestSslUtils.createSslConfig(clientCert, + true, + mode, + trustStore, + certAlias) val sslProps = new Properties() sslConfigs.foreach { case (k, v) => sslProps.put(k, v) } diff --git a/repos/lila/app/controllers/Opening.scala b/repos/lila/app/controllers/Opening.scala index 9e1e938921a..4a1348ac8b2 100644 --- a/repos/lila/app/controllers/Opening.scala +++ b/repos/lila/app/controllers/Opening.scala @@ -26,8 +26,10 @@ object Opening extends LilaController { private def renderShow(opening: OpeningModel)(implicit ctx: Context) = env userInfos ctx.me zip identify(opening) map { case (infos, identified) => - views.html.opening - .show(opening, identified, infos, env.AnimationDuration) + views.html.opening.show(opening, + identified, + infos, + env.AnimationDuration) } private def makeData( diff --git a/repos/lila/app/controllers/Puzzle.scala b/repos/lila/app/controllers/Puzzle.scala index 09471f3a74f..ce463719042 100644 --- a/repos/lila/app/controllers/Puzzle.scala +++ b/repos/lila/app/controllers/Puzzle.scala @@ -23,8 +23,10 @@ object Puzzle extends LilaController { private def renderShow(puzzle: PuzzleModel, mode: String)( implicit ctx: Context) = env userInfos ctx.me map { infos => - views.html.puzzle - .show(puzzle, infos, mode, animationDuration = env.AnimationDuration) + views.html.puzzle.show(puzzle, + infos, + mode, + animationDuration = env.AnimationDuration) } def daily = Open { implicit ctx => diff --git a/repos/lila/app/controllers/QaQuestion.scala b/repos/lila/app/controllers/QaQuestion.scala index 15c771086de..2608bde5c62 100644 --- a/repos/lila/app/controllers/QaQuestion.scala +++ b/repos/lila/app/controllers/QaQuestion.scala @@ -108,9 +108,10 @@ object QaQuestion extends QaController { def remove(questionId: QuestionId) = Secure(_.ModerateQa) { implicit ctx => me => WithQuestion(questionId) { q => - (api.question remove q.id) >> Env.mod.logApi - .deleteQaQuestion(me.id, q.userId, q.title) inject Redirect( - routes.QaQuestion.index()) + (api.question remove q.id) >> Env.mod.logApi.deleteQaQuestion( + me.id, + q.userId, + q.title) inject Redirect(routes.QaQuestion.index()) } } } diff --git a/repos/lila/app/controllers/Round.scala b/repos/lila/app/controllers/Round.scala index ab2f64c8dd8..4e24bddc664 100644 --- a/repos/lila/app/controllers/Round.scala +++ b/repos/lila/app/controllers/Round.scala @@ -200,8 +200,10 @@ object Round extends LilaController with TheftPrevention { } }.mon(_.http.response.watcher.website), api = apiVersion => - Env.api.roundApi - .watcher(pov, apiVersion, tv = none, withOpening = false) map { + Env.api.roundApi.watcher(pov, + apiVersion, + tv = none, + withOpening = false) map { Ok(_) } ) map NoCache diff --git a/repos/lila/app/controllers/Team.scala b/repos/lila/app/controllers/Team.scala index 83e16364c77..140db1e9f06 100644 --- a/repos/lila/app/controllers/Team.scala +++ b/repos/lila/app/controllers/Team.scala @@ -108,9 +108,10 @@ object Team extends 
LilaController { def close(id: String) = Secure(_.CloseTeam) { implicit ctx => me => OptionFuResult(api team id) { team => - (api delete team) >> Env.mod.logApi - .deleteTeam(me.id, team.name, team.description) inject Redirect( - routes.Team all 1) + (api delete team) >> Env.mod.logApi.deleteTeam( + me.id, + team.name, + team.description) inject Redirect(routes.Team all 1) } } diff --git a/repos/lila/app/controllers/User.scala b/repos/lila/app/controllers/User.scala index d56cd9587d8..5b9be00537e 100644 --- a/repos/lila/app/controllers/User.scala +++ b/repos/lila/app/controllers/User.scala @@ -216,8 +216,12 @@ object User extends LilaController { .userHistory(user.id) flatMap { case ((((email, spy), playerAggregateAssessment), history)) => (Env.playban.api bans spy.usersSharingIp.map(_.id)) map { bans => - html.user - .mod(user, email, spy, playerAggregateAssessment, bans, history) + html.user.mod(user, + email, + spy, + playerAggregateAssessment, + bans, + history) } } } diff --git a/repos/lila/modules/db/src/main/Tube.scala b/repos/lila/modules/db/src/main/Tube.scala index a177aa9b6b1..22b18342a9c 100644 --- a/repos/lila/modules/db/src/main/Tube.scala +++ b/repos/lila/modules/db/src/main/Tube.scala @@ -20,8 +20,8 @@ case class BsTube[Doc](handler: BSONHandler[BSONDocument, Doc]) def read(bson: BSONDocument): Option[Doc] = handler readTry bson match { case Success(doc) => Some(doc) case Failure(err) => - logger - .error(s"[tube] Cannot read ${lila.db.BSON.debug(bson)}\n$err\n", err) + logger.error(s"[tube] Cannot read ${lila.db.BSON.debug(bson)}\n$err\n", + err) None } diff --git a/repos/lila/modules/fishnet/src/main/UciToPgn.scala b/repos/lila/modules/fishnet/src/main/UciToPgn.scala index b3051065bae..e25c35a1180 100644 --- a/repos/lila/modules/fishnet/src/main/UciToPgn.scala +++ b/repos/lila/modules/fishnet/src/main/UciToPgn.scala @@ -40,8 +40,7 @@ private object UciToPgn { moves ← ucis.foldLeft[Valid[(Situation, List[Move])]]( success(situation -> Nil)) { case (scalaz.Success((sit, moves)), uci) => - sit - .move(uci.orig, uci.dest, uci.promotion) prefixFailuresWith s"ply $ply " map { + sit.move(uci.orig, uci.dest, uci.promotion) prefixFailuresWith s"ply $ply " map { move => move.situationAfter -> (move :: moves) } diff --git a/repos/lila/modules/forum/src/main/TopicApi.scala b/repos/lila/modules/forum/src/main/TopicApi.scala index 31c2bab0562..ceb1c3c185f 100644 --- a/repos/lila/modules/forum/src/main/TopicApi.scala +++ b/repos/lila/modules/forum/src/main/TopicApi.scala @@ -42,8 +42,7 @@ private[forum] final class TopicApi( def makeTopic(categ: Categ, data: DataForm.TopicData)( implicit ctx: UserContext): Fu[Topic] = - TopicRepo - .nextSlug(categ, data.name) zip detectLanguage(data.post.text) flatMap { + TopicRepo.nextSlug(categ, data.name) zip detectLanguage(data.post.text) flatMap { case (slug, lang) => val topic = Topic.make(categId = categ.slug, slug = slug, @@ -101,8 +100,11 @@ private[forum] final class TopicApi( def toggleClose(categ: Categ, topic: Topic, mod: User): Funit = TopicRepo.close(topic.id, topic.open) >> { - MasterGranter(_.ModerateForum)(mod) ?? modLog - .toggleCloseTopic(mod, categ.name, topic.name, topic.open) + MasterGranter(_.ModerateForum)(mod) ?? 
modLog.toggleCloseTopic( + mod, + categ.name, + topic.name, + topic.open) } def toggleHide(categ: Categ, topic: Topic, mod: User): Funit = diff --git a/repos/lila/modules/game/src/main/Captcher.scala b/repos/lila/modules/game/src/main/Captcher.scala index 314b1c4c68d..94be566b88d 100644 --- a/repos/lila/modules/game/src/main/Captcher.scala +++ b/repos/lila/modules/game/src/main/Captcher.scala @@ -99,8 +99,7 @@ private final class Captcher extends Actor { } toNel private def rewind(game: Game, moves: List[String]): Option[ChessGame] = - pgn.Reader - .movesWithSans(moves, safeInit, tags = Nil) map (_.state) toOption + pgn.Reader.movesWithSans(moves, safeInit, tags = Nil) map (_.state) toOption private def safeInit[A](list: List[A]): List[A] = list match { case x :: Nil => Nil diff --git a/repos/lila/modules/game/src/main/Event.scala b/repos/lila/modules/game/src/main/Event.scala index 4900201a767..2cf895d6368 100644 --- a/repos/lila/modules/game/src/main/Event.scala +++ b/repos/lila/modules/game/src/main/Event.scala @@ -259,8 +259,8 @@ object Event { case class Clock(white: Float, black: Float) extends Event { def typ = "clock" def data = - Json - .obj("white" -> truncateAt(white, 2), "black" -> truncateAt(black, 2)) + Json.obj("white" -> truncateAt(white, 2), + "black" -> truncateAt(black, 2)) } object Clock { def apply(clock: ChessClock): Clock = diff --git a/repos/lila/modules/insight/src/main/Storage.scala b/repos/lila/modules/insight/src/main/Storage.scala index 9e442243043..9355af93ca6 100644 --- a/repos/lila/modules/insight/src/main/Storage.scala +++ b/repos/lila/modules/insight/src/main/Storage.scala @@ -47,8 +47,7 @@ private final class Storage(coll: Coll) { def find(id: String) = coll.find(selectId(id)).one[Entry] def ecos(userId: String): Fu[Set[String]] = - coll - .distinct(F.eco, selectUserId(userId).some) map lila.db.BSON.asStringSet + coll.distinct(F.eco, selectUserId(userId).some) map lila.db.BSON.asStringSet def nbByPerf(userId: String): Fu[Map[PerfType, Int]] = coll diff --git a/repos/lila/modules/lobby/src/main/Lobby.scala b/repos/lila/modules/lobby/src/main/Lobby.scala index 8302fb41a58..0488575cffc 100644 --- a/repos/lila/modules/lobby/src/main/Lobby.scala +++ b/repos/lila/modules/lobby/src/main/Lobby.scala @@ -25,8 +25,10 @@ private[lobby] final class Lobby( val scheduler = context.system.scheduler override def preStart { - scheduler - .schedule(5 seconds, broomPeriod, self, lila.socket.actorApi.Broom) + scheduler.schedule(5 seconds, + broomPeriod, + self, + lila.socket.actorApi.Broom) scheduler.schedule(10 seconds, resyncIdsPeriod, self, actorApi.Resync) } diff --git a/repos/lila/modules/lobby/src/main/Socket.scala b/repos/lila/modules/lobby/src/main/Socket.scala index 0078c9690b6..051cff8ce40 100644 --- a/repos/lila/modules/lobby/src/main/Socket.scala +++ b/repos/lila/modules/lobby/src/main/Socket.scala @@ -30,8 +30,11 @@ private[lobby] final class Socket(val history: History[Messadata], override def preStart { super.preStart - context.system.lilaBus - .subscribe(self, 'changeFeaturedGame, 'streams, 'nbMembers, 'nbRounds) + context.system.lilaBus.subscribe(self, + 'changeFeaturedGame, + 'streams, + 'nbMembers, + 'nbRounds) } def receiveSpecific = { diff --git a/repos/lila/modules/message/src/main/Api.scala b/repos/lila/modules/message/src/main/Api.scala index c616f7b6ce1..07554f8e15a 100644 --- a/repos/lila/modules/message/src/main/Api.scala +++ b/repos/lila/modules/message/src/main/Api.scala @@ -93,8 +93,9 @@ final class Api(unreadCache: UnreadCache, } } >>- { val 
toUserId = newThread otherUserId me - shutup ! lila.hub.actorApi.shutup - .RecordPrivateMessage(me.id, toUserId, text) + shutup ! lila.hub.actorApi.shutup.RecordPrivateMessage(me.id, + toUserId, + text) } inject newThread } } diff --git a/repos/lila/modules/mod/src/main/ModApi.scala b/repos/lila/modules/mod/src/main/ModApi.scala index ec4c32e2c4d..0133cf97a43 100644 --- a/repos/lila/modules/mod/src/main/ModApi.scala +++ b/repos/lila/modules/mod/src/main/ModApi.scala @@ -98,15 +98,13 @@ final class ModApi(logApi: ModlogApi, def setTitle(mod: String, username: String, title: Option[String]): Funit = withUser(username) { user => - UserRepo - .setTitle(user.id, title) >> lightUserApi.invalidate(user.id) >> logApi + UserRepo.setTitle(user.id, title) >> lightUserApi.invalidate(user.id) >> logApi .setTitle(mod, user.id, title) } def setEmail(mod: String, username: String, email: String): Funit = withUser(username) { user => - UserRepo - .email(user.id, email) >> UserRepo.setEmailConfirmed(user.id) >> logApi + UserRepo.email(user.id, email) >> UserRepo.setEmailConfirmed(user.id) >> logApi .setEmail(mod, user.id) } diff --git a/repos/lila/modules/puzzle/src/main/PuzzleApi.scala b/repos/lila/modules/puzzle/src/main/PuzzleApi.scala index f35ddbd51c3..1c7c8e18e63 100644 --- a/repos/lila/modules/puzzle/src/main/PuzzleApi.scala +++ b/repos/lila/modules/puzzle/src/main/PuzzleApi.scala @@ -107,13 +107,14 @@ private[puzzle] final class PuzzleApi(puzzleColl: Coll, case None => p1 withVote (_ add v) } val a2 = a1.copy(vote = v.some) - attemptColl - .update(BSONDocument("_id" -> a2.id), - BSONDocument("$set" -> BSONDocument( - Attempt.BSONFields.vote -> v))) zip puzzleColl.update( - BSONDocument("_id" -> p2.id), - BSONDocument( - "$set" -> BSONDocument(Puzzle.BSONFields.vote -> p2.vote))) map { + attemptColl.update(BSONDocument("_id" -> a2.id), + BSONDocument( + "$set" -> BSONDocument( + Attempt.BSONFields.vote -> v))) zip puzzleColl + .update(BSONDocument("_id" -> p2.id), + BSONDocument( + "$set" -> BSONDocument( + Puzzle.BSONFields.vote -> p2.vote))) map { case _ => p2 -> a2 } } diff --git a/repos/lila/modules/qa/src/main/QaApi.scala b/repos/lila/modules/qa/src/main/QaApi.scala index f19d3708b59..102963e497b 100644 --- a/repos/lila/modules/qa/src/main/QaApi.scala +++ b/repos/lila/modules/qa/src/main/QaApi.scala @@ -50,8 +50,7 @@ final class QaApi(questionColl: Coll, val q2 = q .copy(title = data.title, body = data.body, tags = data.tags) .editNow - questionColl - .update(BSONDocument("_id" -> q2.id), q2) >> tag.clearCache >> relation.clearCache inject q2.some + questionColl.update(BSONDocument("_id" -> q2.id), q2) >> tag.clearCache >> relation.clearCache inject q2.some } } @@ -297,8 +296,9 @@ final class QaApi(questionColl: Coll, } def remove(questionId: QuestionId, commentId: CommentId) = - question.removeComment(questionId, commentId) >> answer - .removeComment(questionId, commentId) + question.removeComment(questionId, commentId) >> answer.removeComment( + questionId, + commentId) private implicit val commentBSONHandler = Macros.handler[Comment] } diff --git a/repos/lila/modules/round/src/main/MoveMonitor.scala b/repos/lila/modules/round/src/main/MoveMonitor.scala index 16c7dc53333..313b909d9bc 100644 --- a/repos/lila/modules/round/src/main/MoveMonitor.scala +++ b/repos/lila/modules/round/src/main/MoveMonitor.scala @@ -11,8 +11,10 @@ private final class MoveMonitor(system: ActorSystem, channel: ActorRef) { lila.mon.round.move.full.count() } - Kamon.metrics - .subscribe("histogram", "round.move.full", 
system.actorOf(Props(new Actor { + Kamon.metrics.subscribe( + "histogram", + "round.move.full", + system.actorOf(Props(new Actor { def receive = { case tick: TickMetricSnapshot => tick.metrics.collectFirst { diff --git a/repos/lila/modules/round/src/main/Player.scala b/repos/lila/modules/round/src/main/Player.scala index 4d314b89b0d..ea5e8c17f63 100644 --- a/repos/lila/modules/round/src/main/Player.scala +++ b/repos/lila/modules/round/src/main/Player.scala @@ -38,8 +38,7 @@ private[round] final class Player(fishnetPlayer: lila.fishnet.Player, .flatMap { case (progress, moveOrDrop) => (GameRepo save progress).mon(_.round.move.segment.save) >>- - (pov.game.hasAi ! uciMemo - .add(pov.game, moveOrDrop)) >>- notifyMove( + (pov.game.hasAi ! uciMemo.add(pov.game, moveOrDrop)) >>- notifyMove( moveOrDrop, progress.game) >> progress.game.finished .fold(moveFinish(progress.game, color) map { @@ -83,8 +82,8 @@ private[round] final class Player(fishnetPlayer: lila.fishnet.Player, .fold(errs => fufail(ClientError(errs.shows)), fuccess) .flatMap { case (progress, moveOrDrop) => - (GameRepo save progress) >>- uciMemo - .add(progress.game, moveOrDrop) >>- notifyMove( + (GameRepo save progress) >>- uciMemo.add(progress.game, + moveOrDrop) >>- notifyMove( moveOrDrop, progress.game) >> progress.game.finished.fold( moveFinish(progress.game, game.turnColor) map { diff --git a/repos/lila/modules/round/src/main/Rematcher.scala b/repos/lila/modules/round/src/main/Rematcher.scala index a98f29131a5..20bd67dde9a 100644 --- a/repos/lila/modules/round/src/main/Rematcher.scala +++ b/repos/lila/modules/round/src/main/Rematcher.scala @@ -63,9 +63,9 @@ private[round] final class Rematcher(messenger: Messenger, private def rematchJoin(pov: Pov): Fu[Events] = for { nextGame ← returnGame(pov) map (_.start) - _ ← (GameRepo insertDenormalized nextGame) >> GameRepo - .saveNext(pov.game, nextGame.id) >>- messenger - .system(pov.game, _.rematchOfferAccepted) >>- { + _ ← (GameRepo insertDenormalized nextGame) >> GameRepo.saveNext( + pov.game, + nextGame.id) >>- messenger.system(pov.game, _.rematchOfferAccepted) >>- { isRematchCache.put(nextGame.id) if (pov.game.variant == Chess960 && !rematch960Cache.get(pov.game.id)) rematch960Cache.put(nextGame.id) diff --git a/repos/lila/modules/round/src/main/Socket.scala b/repos/lila/modules/round/src/main/Socket.scala index f4fa940f8fd..5f54e77b34f 100644 --- a/repos/lila/modules/round/src/main/Socket.scala +++ b/repos/lila/modules/round/src/main/Socket.scala @@ -81,8 +81,8 @@ private[round] final class Socket(gameId: String, override def postStop() { super.postStop() lilaBus.unsubscribe(self) - lilaBus - .publish(lila.hub.actorApi.round.SocketEvent.Stop(gameId), 'roundDoor) + lilaBus.publish(lila.hub.actorApi.round.SocketEvent.Stop(gameId), + 'roundDoor) } private def refreshSubscriptions { diff --git a/repos/lila/modules/round/src/main/StepBuilder.scala b/repos/lila/modules/round/src/main/StepBuilder.scala index 655f8602217..f703bf712f8 100644 --- a/repos/lila/modules/round/src/main/StepBuilder.scala +++ b/repos/lila/modules/round/src/main/StepBuilder.scala @@ -80,29 +80,27 @@ object StepBuilder { before <- steps lift (index - 1) after <- steps lift index } yield - steps - .updated(index, - after - .copy(nag = ad.nag.symbol.some, - comments = ad - .makeComment(false, true) :: after.comments, - variations = - if (ad.info.variation.isEmpty) - after.variations - else - makeVariation( - gameId, - before, - ad.info, - variant).toList :: after.variations))) | steps + steps.updated(index, + after 
+ .copy(nag = ad.nag.symbol.some, + comments = ad + .makeComment(false, true) :: after.comments, + variations = + if (ad.info.variation.isEmpty) + after.variations + else + makeVariation( + gameId, + before, + ad.info, + variant).toList :: after.variations))) | steps } private def makeVariation(gameId: String, fromStep: Step, info: Info, variant: Variant): List[Step] = { - chess.Replay - .gameWhileValid(info.variation take 20, fromStep.fen, variant) match { + chess.Replay.gameWhileValid(info.variation take 20, fromStep.fen, variant) match { case (games, error) => error foreach logChessError(gameId) val lastPly = games.lastOption.??(_.turns) diff --git a/repos/lila/modules/security/src/main/Firewall.scala b/repos/lila/modules/security/src/main/Firewall.scala index 4f2a7fb6d55..1f660c1048b 100644 --- a/repos/lila/modules/security/src/main/Firewall.scala +++ b/repos/lila/modules/security/src/main/Firewall.scala @@ -64,8 +64,9 @@ final class Firewall(cookieName: Option[String], } private def formatReq(req: RequestHeader) = - "%s %s %s" - .format(req.remoteAddress, req.uri, req.headers.get("User-Agent") | "?") + "%s %s %s".format(req.remoteAddress, + req.uri, + req.headers.get("User-Agent") | "?") private def blocksCookies(cookies: Cookies, name: String) = (cookies get name).isDefined diff --git a/repos/lila/modules/site/src/main/Env.scala b/repos/lila/modules/site/src/main/Env.scala index 680f187884a..047f367ad37 100644 --- a/repos/lila/modules/site/src/main/Env.scala +++ b/repos/lila/modules/site/src/main/Env.scala @@ -11,8 +11,9 @@ final class Env(config: Config, hub: lila.hub.Env, system: ActorSystem) { private val SocketUidTtl = config duration "socket.uid.ttl" private val SocketName = config getString "socket.name" - private val socket = system - .actorOf(Props(new Socket(timeout = SocketUidTtl)), name = SocketName) + private val socket = system.actorOf( + Props(new Socket(timeout = SocketUidTtl)), + name = SocketName) lazy val socketHandler = new SocketHandler(socket, hub) } diff --git a/repos/lila/modules/socket/src/main/Handler.scala b/repos/lila/modules/socket/src/main/Handler.scala index e58c946b00c..b873ac27dc4 100644 --- a/repos/lila/modules/socket/src/main/Handler.scala +++ b/repos/lila/modules/socket/src/main/Handler.scala @@ -92,8 +92,8 @@ object Handler { Json.obj("opening" -> o) }) case None => - member push lila.socket.Socket - .makeMessage("destsFailure", "Bad dests request") + member push lila.socket.Socket.makeMessage("destsFailure", + "Bad dests request") } } case _ => // logwarn("Unhandled msg: " + msg) diff --git a/repos/lila/modules/team/src/main/TeamApi.scala b/repos/lila/modules/team/src/main/TeamApi.scala index 62dc86ce9ef..d51b2924860 100644 --- a/repos/lila/modules/team/src/main/TeamApi.scala +++ b/repos/lila/modules/team/src/main/TeamApi.scala @@ -139,8 +139,7 @@ final class TeamApi(cached: Cached, def doQuit(team: Team, userId: String): Funit = belongsTo(team.id, userId) ?? 
{ - MemberRepo.remove(team.id, userId) >> TeamRepo - .incMembers(team.id, -1) >>- + MemberRepo.remove(team.id, userId) >> TeamRepo.incMembers(team.id, -1) >>- (cached.teamIdsCache invalidate userId) } diff --git a/repos/lila/modules/tournament/src/main/ApiActor.scala b/repos/lila/modules/tournament/src/main/ApiActor.scala index 9bf294b8507..3f0472c7621 100644 --- a/repos/lila/modules/tournament/src/main/ApiActor.scala +++ b/repos/lila/modules/tournament/src/main/ApiActor.scala @@ -8,8 +8,10 @@ import lila.game.actorApi.FinishGame private[tournament] final class ApiActor(api: TournamentApi) extends Actor { override def preStart { - context.system.lilaBus - .subscribe(self, 'finishGame, 'adjustCheater, 'adjustBooster) + context.system.lilaBus.subscribe(self, + 'finishGame, + 'adjustCheater, + 'adjustBooster) } def receive = { diff --git a/repos/lila/modules/tournament/src/main/JsonView.scala b/repos/lila/modules/tournament/src/main/JsonView.scala index 96e1dcf935b..3221ac99b8d 100644 --- a/repos/lila/modules/tournament/src/main/JsonView.scala +++ b/repos/lila/modules/tournament/src/main/JsonView.scala @@ -84,8 +84,8 @@ final class JsonView(getLightUser: String => Option[LightUser], def playerInfo(info: PlayerInfoExt): Fu[JsObject] = for { ranking <- cached ranking info.tour - pairings <- PairingRepo - .finishedByPlayerChronological(info.tour.id, info.user.id) + pairings <- PairingRepo.finishedByPlayerChronological(info.tour.id, + info.user.id) sheet = info.tour.system.scoringSystem .sheet(info.tour, info.user.id, pairings) tpr <- performance(info.tour, info.player, pairings) @@ -158,8 +158,9 @@ final class JsonView(getLightUser: String => Option[LightUser], private def computeStanding(tour: Tournament, page: Int): Fu[JsObject] = for { - rankedPlayers <- PlayerRepo - .bestByTourWithRankByPage(tour.id, 10, page max 1) + rankedPlayers <- PlayerRepo.bestByTourWithRankByPage(tour.id, + 10, + page max 1) sheets <- rankedPlayers.map { p => PairingRepo.finishedByPlayerChronological(tour.id, p.player.userId) map { pairings => diff --git a/repos/lila/modules/tournament/src/main/PlayerRepo.scala b/repos/lila/modules/tournament/src/main/PlayerRepo.scala index 3db0b3bad1c..4dbf6160861 100644 --- a/repos/lila/modules/tournament/src/main/PlayerRepo.scala +++ b/repos/lila/modules/tournament/src/main/PlayerRepo.scala @@ -134,8 +134,7 @@ object PlayerRepo { coll.distinct("uid", selectTour(tourId).some) map lila.db.BSON.asStrings def activeUserIds(tourId: String): Fu[List[String]] = - coll - .distinct("uid", (selectTour(tourId) ++ selectActive).some) map lila.db.BSON.asStrings + coll.distinct("uid", (selectTour(tourId) ++ selectActive).some) map lila.db.BSON.asStrings def winner(tourId: String): Fu[Option[Player]] = coll.find(selectTour(tourId)).sort(bestSort).one[Player] diff --git a/repos/lila/modules/tournament/src/main/TournamentApi.scala b/repos/lila/modules/tournament/src/main/TournamentApi.scala index 97955ccb66e..6cc941ef6e5 100644 --- a/repos/lila/modules/tournament/src/main/TournamentApi.scala +++ b/repos/lila/modules/tournament/src/main/TournamentApi.scala @@ -191,8 +191,7 @@ private[tournament] final class TournamentApi( def join(tourId: String, me: User) { Sequencing(tourId)(TournamentRepo.enterableById) { tour => - PlayerRepo - .join(tour.id, me, tour.perfLens) >> updateNbPlayers(tour.id) >>- { + PlayerRepo.join(tour.id, me, tour.perfLens) >> updateNbPlayers(tour.id) >>- { withdrawAllNonMarathonOrUniqueBut(tour.id, me.id) socketReload(tour.id) publish() @@ -220,12 +219,10 @@ private[tournament] 
final class TournamentApi( def withdraw(tourId: String, userId: String) { Sequencing(tourId)(TournamentRepo.enterableById) { case tour if tour.isCreated => - PlayerRepo - .remove(tour.id, userId) >> updateNbPlayers(tour.id) >>- socketReload( + PlayerRepo.remove(tour.id, userId) >> updateNbPlayers(tour.id) >>- socketReload( tour.id) >>- publish() case tour if tour.isStarted => - PlayerRepo - .withdraw(tour.id, userId) >>- socketReload(tour.id) >>- publish() + PlayerRepo.withdraw(tour.id, userId) >>- socketReload(tour.id) >>- publish() case _ => funit } } diff --git a/repos/lila/modules/tv/src/main/Tv.scala b/repos/lila/modules/tv/src/main/Tv.scala index 9286e28f5e8..320f1984425 100644 --- a/repos/lila/modules/tv/src/main/Tv.scala +++ b/repos/lila/modules/tv/src/main/Tv.scala @@ -23,8 +23,7 @@ final class Tv(actor: ActorRef) { } flatMap { _ ?? GameRepo.game } def getGames(channel: Tv.Channel, max: Int): Fu[List[Game]] = - (actor ? TvActor - .GetGameIds(channel, max) mapTo manifest[List[String]]) recover { + (actor ? TvActor.GetGameIds(channel, max) mapTo manifest[List[String]]) recover { case e: Exception => Nil } flatMap GameRepo.games diff --git a/repos/lila/modules/user/src/main/Env.scala b/repos/lila/modules/user/src/main/Env.scala index 0a5af276125..7e2b4b40cc8 100644 --- a/repos/lila/modules/user/src/main/Env.scala +++ b/repos/lila/modules/user/src/main/Env.scala @@ -58,8 +58,10 @@ final class Env(config: Config, system.actorOf(Props(new Actor { override def preStart() { - system.lilaBus - .subscribe(self, 'adjustCheater, 'adjustBooster, 'userActive) + system.lilaBus.subscribe(self, + 'adjustCheater, + 'adjustBooster, + 'userActive) } def receive = { case lila.hub.actorApi.mod.MarkCheater(userId) => diff --git a/repos/marathon/mesos-simulation/src/main/scala/mesosphere/mesos/simulation/DriverActor.scala b/repos/marathon/mesos-simulation/src/main/scala/mesosphere/mesos/simulation/DriverActor.scala index e159bfc410c..21189bfceb4 100644 --- a/repos/marathon/mesos-simulation/src/main/scala/mesosphere/mesos/simulation/DriverActor.scala +++ b/repos/marathon/mesos-simulation/src/main/scala/mesosphere/mesos/simulation/DriverActor.scala @@ -133,8 +133,8 @@ class DriverActor(schedulerProps: Props) extends Actor { import context.dispatcher periodicOffers = Some( - context.system.scheduler - .schedule(1.second, 1.seconds)(scheduler ! offers) + context.system.scheduler.schedule(1.second, 1.seconds)( + scheduler ! 
offers) ) } diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/MarathonSchedulerActor.scala b/repos/marathon/src/main/scala/mesosphere/marathon/MarathonSchedulerActor.scala index 93ca17bb686..0fd0210330b 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/MarathonSchedulerActor.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/MarathonSchedulerActor.scala @@ -67,8 +67,9 @@ class MarathonSchedulerActor private ( override def preStart(): Unit = { schedulerActions = createSchedulerActions(self) - deploymentManager = context - .actorOf(deploymentManagerProps(schedulerActions), "DeploymentManager") + deploymentManager = context.actorOf( + deploymentManagerProps(schedulerActions), + "DeploymentManager") historyActor = context.actorOf(historyActorProps, "HistoryActor") leaderInfo.subscribe(self) diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/MarathonSchedulerService.scala b/repos/marathon/src/main/scala/mesosphere/marathon/MarathonSchedulerService.scala index e342720298b..5ffbffb9f42 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/MarathonSchedulerService.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/MarathonSchedulerService.scala @@ -130,8 +130,10 @@ class MarathonSchedulerService @Inject()( def deploy(plan: DeploymentPlan, force: Boolean = false): Future[Unit] = { log.info(s"Deploy plan with force=$force:\n$plan ") - val future: Future[Any] = PromiseActor - .askWithoutTimeout(system, schedulerActor, Deploy(plan, force)) + val future: Future[Any] = PromiseActor.askWithoutTimeout( + system, + schedulerActor, + Deploy(plan, force)) future.map { case DeploymentStarted(_) => () case CommandFailed(_, t) => throw t @@ -445,14 +447,14 @@ class MarathonSchedulerService @Inject()( } private def startLeaderDurationMetric() = { - metrics - .gauge("service.mesosphere.marathon.leaderDuration", new Gauge[Long] { - val startedAt = System.currentTimeMillis() - - override def getValue: Long = { - System.currentTimeMillis() - startedAt - } - }) + metrics.gauge("service.mesosphere.marathon.leaderDuration", + new Gauge[Long] { + val startedAt = System.currentTimeMillis() + + override def getValue: Long = { + System.currentTimeMillis() - startedAt + } + }) } private def stopLeaderDurationMetric() = { metrics.registry.remove("service.mesosphere.marathon.leaderDuration") diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/SchedulerDriverFactory.scala b/repos/marathon/src/main/scala/mesosphere/marathon/SchedulerDriverFactory.scala index 3fa02db3ed2..e2a4d966142 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/SchedulerDriverFactory.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/SchedulerDriverFactory.scala @@ -24,8 +24,10 @@ class MesosSchedulerDriverFactory @Inject()( override def createDriver(): SchedulerDriver = { implicit val zkTimeout = config.zkTimeoutDuration val frameworkId = frameworkIdUtil.fetch() - val driver = MarathonSchedulerDriver - .newDriver(config, httpConfig, scheduler, frameworkId) + val driver = MarathonSchedulerDriver.newDriver(config, + httpConfig, + scheduler, + frameworkId) holder.driver = Some(driver) driver } diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/api/CORSFilter.scala b/repos/marathon/src/main/scala/mesosphere/marathon/api/CORSFilter.scala index 422aa7439fb..6c39e35e0f1 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/api/CORSFilter.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/api/CORSFilter.scala @@ -40,8 +40,8 @@ 
class CORSFilter @Inject()(config: MarathonConf) extends Filter { httpResponse.setHeader("Access-Control-Allow-Headers", accessControlRequestHeaders.mkString(", ")) - httpResponse - .setHeader("Access-Control-Allow-Methods", "GET, HEAD, OPTIONS") + httpResponse.setHeader("Access-Control-Allow-Methods", + "GET, HEAD, OPTIONS") httpResponse.setHeader("Access-Control-Max-Age", "86400") case _ => // Ignore other responses diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/core/flow/FlowModule.scala b/repos/marathon/src/main/scala/mesosphere/marathon/core/flow/FlowModule.scala index 4461ea17815..d064b100673 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/core/flow/FlowModule.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/core/flow/FlowModule.scala @@ -45,8 +45,8 @@ class FlowModule(leadershipModule: LeadershipModule) { offersWanted, driverHolder ) - val actorRef = leadershipModule - .startWhenLeader(reviveOffersActor, "reviveOffersWhenWanted") + val actorRef = leadershipModule.startWhenLeader(reviveOffersActor, + "reviveOffersWhenWanted") log.info( s"Calling reviveOffers is enabled. Use --disable_revive_offers_for_new_apps to disable.") Some(new OfferReviverDelegate(actorRef)) diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/core/launcher/TaskOp.scala b/repos/marathon/src/main/scala/mesosphere/marathon/core/launcher/TaskOp.scala index 31ab0c8710d..6524a178950 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/core/launcher/TaskOp.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/core/launcher/TaskOp.scala @@ -44,8 +44,8 @@ object TaskOp { def applyToOffer(offer: MesosProtos.Offer): MesosProtos.Offer = { import scala.collection.JavaConverters._ - ResourceUtil - .consumeResourcesFromOffer(offer, taskInfo.getResourcesList.asScala) + ResourceUtil.consumeResourcesFromOffer(offer, + taskInfo.getResourcesList.asScala) } } diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/core/launcher/impl/TaskOpFactoryHelper.scala b/repos/marathon/src/main/scala/mesosphere/marathon/core/launcher/impl/TaskOpFactoryHelper.scala index 9c147070f59..ef691cdd7f9 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/core/launcher/impl/TaskOpFactoryHelper.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/core/launcher/impl/TaskOpFactoryHelper.scala @@ -35,8 +35,9 @@ class TaskOpFactoryHelper(private val principalOpt: Option[String], def createOperations = Seq( offerOperationFactory.reserve(frameworkId, newTask.taskId, resources), - offerOperationFactory - .createVolumes(frameworkId, newTask.taskId, localVolumes)) + offerOperationFactory.createVolumes(frameworkId, + newTask.taskId, + localVolumes)) TaskOp.ReserveAndCreateVolumes(newTask, resources, diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/core/launchqueue/impl/AppTaskLauncherActor.scala b/repos/marathon/src/main/scala/mesosphere/marathon/core/launchqueue/impl/AppTaskLauncherActor.scala index 09e433c18ff..4fdc07828f9 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/core/launchqueue/impl/AppTaskLauncherActor.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/core/launchqueue/impl/AppTaskLauncherActor.scala @@ -253,8 +253,8 @@ private class AppTaskLauncherActor(config: LaunchQueueConfig, op.taskId) case TaskOpSourceDelegate.TaskOpRejected(op, reason) => - log - .warning("Unexpected task launch rejected for taskId '{}'.", op.taskId) + log.warning("Unexpected task launch rejected for taskId '{}'.", + op.taskId) case 
TaskOpSourceDelegate.TaskOpAccepted(op) => inFlightTaskOperations -= op.taskId diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/core/launchqueue/impl/RateLimiterActor.scala b/repos/marathon/src/main/scala/mesosphere/marathon/core/launchqueue/impl/RateLimiterActor.scala index bdefc7d3ffd..83202460285 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/core/launchqueue/impl/RateLimiterActor.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/core/launchqueue/impl/RateLimiterActor.scala @@ -47,8 +47,10 @@ private class RateLimiterActor private (rateLimiter: RateLimiter, override def preStart(): Unit = { import context.dispatcher - cleanup = context.system.scheduler - .schedule(10.seconds, 10.seconds, self, CleanupOverdueDelays) + cleanup = context.system.scheduler.schedule(10.seconds, + 10.seconds, + self, + CleanupOverdueDelays) log.info("started RateLimiterActor") } diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/core/matcher/manager/impl/OfferMatcherManagerActor.scala b/repos/marathon/src/main/scala/mesosphere/marathon/core/matcher/manager/impl/OfferMatcherManagerActor.scala index bb5768a4893..bc92749b85e 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/core/matcher/manager/impl/OfferMatcherManagerActor.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/core/matcher/manager/impl/OfferMatcherManagerActor.scala @@ -174,16 +174,20 @@ private[impl] class OfferMatcherManagerActor private ( case ActorOfferMatcher.MatchOffer(deadline, offer: Offer) if !offersWanted => log.debug(s"Ignoring offer ${offer.getId.getValue}: No one interested.") - sender() ! OfferMatcher - .MatchedTaskOps(offer.getId, Seq.empty, resendThisOffer = false) + sender() ! OfferMatcher.MatchedTaskOps(offer.getId, + Seq.empty, + resendThisOffer = false) case ActorOfferMatcher.MatchOffer(deadline, offer: Offer) => log.debug(s"Start processing offer ${offer.getId.getValue}") // setup initial offer data val randomizedMatchers = offerMatchers(offer) - val data = OfferMatcherManagerActor - .OfferData(offer, deadline, sender(), randomizedMatchers, Seq.empty) + val data = OfferMatcherManagerActor.OfferData(offer, + deadline, + sender(), + randomizedMatchers, + Seq.empty) offerQueues += offer.getId -> data metrics.currentOffersGauge.setValue(offerQueues.size) @@ -298,8 +302,9 @@ private[impl] class OfferMatcherManagerActor private ( private[this] def sendMatchResult(data: OfferData, resendThisOffer: Boolean): Unit = { - data.sender ! OfferMatcher - .MatchedTaskOps(data.offer.getId, data.ops, resendThisOffer) + data.sender ! 
OfferMatcher.MatchedTaskOps(data.offer.getId, + data.ops, + resendThisOffer) offerQueues -= data.offer.getId metrics.currentOffersGauge.setValue(offerQueues.size) //scalastyle:off magic.number diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/core/task/jobs/TaskJobsModule.scala b/repos/marathon/src/main/scala/mesosphere/marathon/core/task/jobs/TaskJobsModule.scala index 4927fca85c5..7dd215e3e2e 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/core/task/jobs/TaskJobsModule.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/core/task/jobs/TaskJobsModule.scala @@ -16,8 +16,10 @@ class TaskJobsModule(config: MarathonConf, taskTracker: TaskTracker, marathonSchedulerDriverHolder: MarathonSchedulerDriverHolder): Unit = { leadershipModule.startWhenLeader( - KillOverdueTasksActor - .props(config, taskTracker, marathonSchedulerDriverHolder, clock), + KillOverdueTasksActor.props(config, + taskTracker, + marathonSchedulerDriverHolder, + clock), "killOverdueStagedTasks") } } diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/core/task/tracker/TaskTrackerModule.scala b/repos/marathon/src/main/scala/mesosphere/marathon/core/task/tracker/TaskTrackerModule.scala index 076a881b5b4..529e49a825c 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/core/task/tracker/TaskTrackerModule.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/core/task/tracker/TaskTrackerModule.scala @@ -35,13 +35,16 @@ class TaskTrackerModule(clock: Clock, private[this] lazy val taskUpdaterActorMetrics = new TaskUpdateActor.ActorMetrics(metrics) private[this] def taskUpdaterActorProps(taskTrackerRef: ActorRef) = - TaskUpdateActor - .props(clock, taskUpdaterActorMetrics, taskOpProcessor(taskTrackerRef)) + TaskUpdateActor.props(clock, + taskUpdaterActorMetrics, + taskOpProcessor(taskTrackerRef)) private[this] lazy val taskLoader = new TaskLoaderImpl(taskRepository) private[this] lazy val taskTrackerMetrics = new TaskTrackerActor.ActorMetrics(metrics) - private[this] lazy val taskTrackerActorProps = TaskTrackerActor - .props(taskTrackerMetrics, taskLoader, taskUpdaterActorProps) + private[this] lazy val taskTrackerActorProps = TaskTrackerActor.props( + taskTrackerMetrics, + taskLoader, + taskUpdaterActorProps) protected lazy val taskTrackerActorName = "taskTracker" private[this] lazy val taskTrackerActorRef = leadershipModule.startWhenLeader( diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/health/HealthCheckWorkerActor.scala b/repos/marathon/src/main/scala/mesosphere/marathon/health/HealthCheckWorkerActor.scala index ddf829f0193..27d45d53b00 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/health/HealthCheckWorkerActor.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/health/HealthCheckWorkerActor.scala @@ -161,8 +161,9 @@ class HealthCheckWorkerActor extends Actor with ActorLogging { val context = SSLContext.getInstance("Default") //scalastyle:off null - context - .init(Array[KeyManager](), Array(BlindFaithX509TrustManager), null) + context.init(Array[KeyManager](), + Array(BlindFaithX509TrustManager), + null) //scalastyle:on context } diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/state/GroupManager.scala b/repos/marathon/src/main/scala/mesosphere/marathon/state/GroupManager.scala index b7ee1c77952..8f5b0d1fc6e 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/state/GroupManager.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/state/GroupManager.scala @@ -166,8 +166,9 @@ class 
GroupManager @Singleton @Inject()( from <- rootGroup() (toUnversioned, resolve) <- resolveStoreUrls( assignDynamicServicePorts(from, change(from))) - to = GroupVersioningUtil - .updateVersionInfoForChangedApps(version, from, toUnversioned) + to = GroupVersioningUtil.updateVersionInfoForChangedApps(version, + from, + toUnversioned) _ = validateOrThrow(to)(Group.validGroupWithConfig(config.maxApps.get)) plan = DeploymentPlan(from, to, resolve, version, toKill) _ = validateOrThrow(plan) diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/state/Migration.scala b/repos/marathon/src/main/scala/mesosphere/marathon/state/Migration.scala index 784b8c28272..ddddee06e24 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/state/Migration.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/state/Migration.scala @@ -162,8 +162,9 @@ class MigrationTo0_11(groupRepository: GroupRepository, updatedApps: Iterable[AppDefinition]): Future[Unit] = { val updatedGroup = updatedApps.foldLeft(rootGroup) { (updatedGroup, updatedApp) => - updatedGroup - .updateApp(updatedApp.id, _ => updatedApp, updatedApp.version) + updatedGroup.updateApp(updatedApp.id, + _ => updatedApp, + updatedApp.version) } groupRepository .store(groupRepository.zkRootName, updatedGroup) diff --git a/repos/marathon/src/main/scala/mesosphere/marathon/upgrade/DeploymentActor.scala b/repos/marathon/src/main/scala/mesosphere/marathon/upgrade/DeploymentActor.scala index 4391e1bb8dc..af0741b7de9 100644 --- a/repos/marathon/src/main/scala/mesosphere/marathon/upgrade/DeploymentActor.scala +++ b/repos/marathon/src/main/scala/mesosphere/marathon/upgrade/DeploymentActor.scala @@ -135,8 +135,10 @@ private class DeploymentActor(parent: ActorRef, Constraints.selectTasksToKill(app, notSentencedAndRunning, toKillCount) val ScalingProposition(tasksToKill, tasksToStart) = - ScalingProposition - .propose(runningTasks, toKill, killToMeetConstraints, scaleTo) + ScalingProposition.propose(runningTasks, + toKill, + killToMeetConstraints, + scaleTo) def killTasksIfNeeded: Future[Unit] = tasksToKill.fold(Future.successful(())) { diff --git a/repos/marathon/src/main/scala/mesosphere/mesos/ResourceMatcher.scala b/repos/marathon/src/main/scala/mesosphere/mesos/ResourceMatcher.scala index 6b93165f4f0..a784ad733d3 100644 --- a/repos/marathon/src/main/scala/mesosphere/mesos/ResourceMatcher.scala +++ b/repos/marathon/src/main/scala/mesosphere/mesos/ResourceMatcher.scala @@ -197,8 +197,9 @@ object ResourceMatcher { if (nextResource.hasReservation) Option(nextResource.getReservation) else None - val consumedValue = ScalarMatch - .Consumption(consume, nextResource.getRole, reservation) + val consumedValue = ScalarMatch.Consumption(consume, + nextResource.getRole, + reservation) findMatches(newValueLeft, resourcesLeft.tail, consumedValue :: resourcesConsumed) diff --git a/repos/marathon/src/main/scala/mesosphere/util/CapConcurrentExecutions.scala b/repos/marathon/src/main/scala/mesosphere/util/CapConcurrentExecutions.scala index f0dc3ce483b..dc5fbf34c7f 100644 --- a/repos/marathon/src/main/scala/mesosphere/util/CapConcurrentExecutions.scala +++ b/repos/marathon/src/main/scala/mesosphere/util/CapConcurrentExecutions.scala @@ -64,8 +64,10 @@ class CapConcurrentExecutions private (metrics: CapConcurrentExecutionsMetrics, import CapConcurrentExecutions.log private[util] val serializeExecutionActorRef = { - val serializeExecutionActorProps = RestrictParallelExecutionsActor - .props(metrics, maxParallel = maxParallel, maxQueued = maxQueued) + val 
serializeExecutionActorProps = RestrictParallelExecutionsActor.props( + metrics, + maxParallel = maxParallel, + maxQueued = maxQueued) actorRefFactory.actorOf(serializeExecutionActorProps, actorName) } @@ -74,8 +76,9 @@ class CapConcurrentExecutions private (metrics: CapConcurrentExecutionsMetrics, */ def apply[T](block: => Future[T]): Future[T] = { val promise = Promise[T]() - serializeExecutionActorRef ! RestrictParallelExecutionsActor - .Execute(promise, () => block) + serializeExecutionActorRef ! RestrictParallelExecutionsActor.Execute( + promise, + () => block) promise.future } diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/DebugConfTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/DebugConfTest.scala index 1d24033f181..2222bd71f37 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/DebugConfTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/DebugConfTest.scala @@ -25,8 +25,9 @@ class DebugConfTest extends MarathonSpec { } test("tracing can be disabled") { - val conf = MarathonTestHelper - .makeConfig("" + "--master", "127.0.0.1:5050", "--disable_tracing") + val conf = MarathonTestHelper.makeConfig("" + "--master", + "127.0.0.1:5050", + "--disable_tracing") assert(!conf.enableDebugTracing) } diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/MarathonTestHelper.scala b/repos/marathon/src/test/scala/mesosphere/marathon/MarathonTestHelper.scala index 0c566d71fa3..a23a0cd913b 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/MarathonTestHelper.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/MarathonTestHelper.scala @@ -434,8 +434,9 @@ object MarathonTestHelper { def taskLaunched: Task.Launched = { val now = Timestamp.now() - Task - .Launched(now, status = Task.Status(now), networking = Task.NoNetworking) + Task.Launched(now, + status = Task.Status(now), + networking = Task.NoNetworking) } def taskLaunchedOp: TaskStateOp.Launch = { diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/AppTasksResourceTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/AppTasksResourceTest.scala index b0b74d6c523..44c584aa53b 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/AppTasksResourceTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/AppTasksResourceTest.scala @@ -41,8 +41,11 @@ class AppTasksResourceTest groupManager.app(appId.toRootPath) returns Future.successful( Some(AppDefinition(appId.toRootPath))) - val response = appsTaskResource - .deleteMany(appId, host, scale = false, force = false, auth.request) + val response = appsTaskResource.deleteMany(appId, + host, + scale = false, + force = false, + auth.request) response.getStatus shouldEqual 200 JsonTestHelper .assertThatJsonString(response.getEntity.asInstanceOf[String]) diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/AppsResourceTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/AppsResourceTest.scala index 77963c0d912..bf764317c9f 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/AppsResourceTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/AppsResourceTest.scala @@ -366,8 +366,11 @@ class AppsResourceTest .successful(Seq(appInfo)) When("The the index is fetched without any filters") - val response = appsResource - .index(null, null, null, new java.util.HashSet(), auth.request) + val response = appsResource.index(null, + null, + null, + new java.util.HashSet(), + auth.request) Then("The 
response holds counts and deployments") val appJson = Json.parse(response.getEntity.asInstanceOf[String]) diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/GroupsResourceTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/GroupsResourceTest.scala index d1ba5766c97..529eddcbfea 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/GroupsResourceTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/GroupsResourceTest.scala @@ -39,8 +39,11 @@ class GroupsResourceTest When("Doing a dry run update") val body = Json.stringify(Json.toJson(update)).getBytes - val result = groupsResource - .update("/test", force = false, dryRun = true, body, auth.request) + val result = groupsResource.update("/test", + force = false, + dryRun = true, + body, + auth.request) val json = Json.parse(result.getEntity.toString) Then("The deployment plan is correct") @@ -80,8 +83,10 @@ class GroupsResourceTest create.getStatus should be(auth.NotAuthenticatedStatus) When(s"the group is created") - val createWithPath = groupsResource - .createWithPath("/my/id", false, body.getBytes("UTF-8"), req) + val createWithPath = groupsResource.createWithPath("/my/id", + false, + body.getBytes("UTF-8"), + req) Then("we receive a NotAuthenticated response") createWithPath.getStatus should be(auth.NotAuthenticatedStatus) @@ -128,8 +133,10 @@ class GroupsResourceTest create.getStatus should be(auth.UnauthorizedStatus) When(s"the group is created") - val createWithPath = groupsResource - .createWithPath("/my/id", false, body.getBytes("UTF-8"), req) + val createWithPath = groupsResource.createWithPath("/my/id", + false, + body.getBytes("UTF-8"), + req) Then("we receive a Not Authorized response") createWithPath.getStatus should be(auth.UnauthorizedStatus) diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/QueueResourceTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/QueueResourceTest.scala index ebd1fda992d..4053ce2f8e3 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/QueueResourceTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/QueueResourceTest.scala @@ -137,8 +137,11 @@ class QueueResourceTest When(s"one delay is reset") val appId = "appId".toRootPath - val taskCount = LaunchQueue - .QueuedTaskInfo(AppDefinition(appId), 0, 0, 0, Timestamp.now()) + val taskCount = LaunchQueue.QueuedTaskInfo(AppDefinition(appId), + 0, + 0, + 0, + Timestamp.now()) queue.list returns Seq(taskCount) val resetDelay = queueResource.resetDelay("appId", req) diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/TasksResourceTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/TasksResourceTest.scala index 7100d2ab96e..07b3805ea13 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/TasksResourceTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/TasksResourceTest.scala @@ -51,8 +51,10 @@ class TasksResourceTest groupManager.app(app2) returns Future.successful(Some(AppDefinition(app2))) When("we ask to kill both tasks") - val response = taskResource - .killTasks(scale = false, force = false, body = bodyBytes, auth.request) + val response = taskResource.killTasks(scale = false, + force = false, + body = bodyBytes, + auth.request) Then("The response should be OK") response.getStatus shouldEqual 200 @@ -94,8 +96,10 @@ class TasksResourceTest groupManager.app(app2) returns Future.successful(Some(AppDefinition(app2))) When("we ask to kill 
both tasks") - val response = taskResource - .killTasks(scale = true, force = true, body = bodyBytes, auth.request) + val response = taskResource.killTasks(scale = true, + force = true, + body = bodyBytes, + auth.request) Then("The response should be OK") response.getStatus shouldEqual 200 diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/json/AppDefinitionTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/json/AppDefinitionTest.scala index b36e077989a..614a37e2e1c 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/json/AppDefinitionTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/api/v2/json/AppDefinitionTest.scala @@ -22,8 +22,9 @@ import scala.concurrent.duration._ class AppDefinitionTest extends MarathonSpec with Matchers { test("Validation") { - def shouldViolate( - app: AppDefinition, path: String, template: String): Unit = { + def shouldViolate(app: AppDefinition, + path: String, + template: String): Unit = { validate(app) match { case Success => fail() case f: Failure => @@ -35,8 +36,9 @@ class AppDefinitionTest extends MarathonSpec with Matchers { } } - def shouldNotViolate( - app: AppDefinition, path: String, template: String): Unit = { + def shouldNotViolate(app: AppDefinition, + path: String, + template: String): Unit = { validate(app) match { case Success => case f: Failure => @@ -70,281 +72,284 @@ class AppDefinitionTest extends MarathonSpec with Matchers { shouldViolate(app, "/id", idError) app = AppDefinition( - id = "test".toPath, - instances = -3, - portDefinitions = PortDefinitions(9000, 8080, 9000) + id = "test".toPath, + instances = -3, + portDefinitions = PortDefinitions(9000, 8080, 9000) ) shouldViolate( - app, - "/portDefinitions", - "Ports must be unique." + app, + "/portDefinitions", + "Ports must be unique." ) MarathonTestHelper.validateJsonSchema(app, false) app = AppDefinition( - id = "test".toPath, - portDefinitions = PortDefinitions(0, 0, 8080), - cmd = Some("true") + id = "test".toPath, + portDefinitions = PortDefinitions(0, 0, 8080), + cmd = Some("true") ) shouldNotViolate( - app, - "/portDefinitions", - "Ports must be unique." + app, + "/portDefinitions", + "Ports must be unique." ) MarathonTestHelper.validateJsonSchema(app, true) app = AppDefinition( - id = "test".toPath, - cmd = Some("true"), - container = Some( - Container( - docker = Some(Docker( - image = "mesosphere/marathon", - network = Some( - mesos.ContainerInfo.DockerInfo.Network.BRIDGE), - portMappings = Some(Seq( - Docker.PortMapping( - 8080, 0, 0, "tcp", Some("foo")), - Docker.PortMapping( - 8081, 0, 0, "tcp", Some("foo")) - )) - )) - )), - portDefinitions = Nil + id = "test".toPath, + cmd = Some("true"), + container = Some( + Container( + docker = Some( + Docker( + image = "mesosphere/marathon", + network = Some(mesos.ContainerInfo.DockerInfo.Network.BRIDGE), + portMappings = Some( + Seq( + Docker.PortMapping(8080, 0, 0, "tcp", Some("foo")), + Docker.PortMapping(8081, 0, 0, "tcp", Some("foo")) + )) + )) + )), + portDefinitions = Nil ) shouldViolate( - app, - "/container/docker/portMappings", - "Port names must be unique." + app, + "/container/docker/portMappings", + "Port names must be unique." 
) app = AppDefinition( - id = "test".toPath, - cmd = Some("true"), - portDefinitions = Seq( - PortDefinition(port = 9000, name = Some("foo")), - PortDefinition(port = 9001, name = Some("foo")) - ) + id = "test".toPath, + cmd = Some("true"), + portDefinitions = Seq( + PortDefinition(port = 9000, name = Some("foo")), + PortDefinition(port = 9001, name = Some("foo")) + ) ) shouldViolate( - app, - "/portDefinitions", - "Port names must be unique." + app, + "/portDefinitions", + "Port names must be unique." ) val correct = AppDefinition(id = "test".toPath) app = correct.copy( - container = Some( - Container( - docker = Some(Docker( - image = "mesosphere/marathon", - network = Some( - mesos.ContainerInfo.DockerInfo.Network.BRIDGE), - portMappings = Some(Seq( - Docker.PortMapping( - 8080, 0, 0, "tcp", Some("foo")), - Docker.PortMapping( - 8081, 0, 0, "tcp", Some("bar")) - )) - )) - )), - portDefinitions = Nil) + container = Some( + Container( + docker = Some( + Docker( + image = "mesosphere/marathon", + network = Some(mesos.ContainerInfo.DockerInfo.Network.BRIDGE), + portMappings = Some( + Seq( + Docker.PortMapping(8080, 0, 0, "tcp", Some("foo")), + Docker.PortMapping(8081, 0, 0, "tcp", Some("bar")) + )) + )) + )), + portDefinitions = Nil) shouldNotViolate( - app, - "/container/docker/portMappings", - "Port names must be unique." + app, + "/container/docker/portMappings", + "Port names must be unique." ) app = correct.copy( - portDefinitions = Seq( - PortDefinition(port = 9000, name = Some("foo")), - PortDefinition(port = 9001, name = Some("bar")) - ) + portDefinitions = Seq( + PortDefinition(port = 9000, name = Some("foo")), + PortDefinition(port = 9001, name = Some("bar")) + ) ) shouldNotViolate( - app, - "/portDefinitions", - "Port names must be unique." + app, + "/portDefinitions", + "Port names must be unique." 
) app = correct.copy(executor = "//cmd") shouldNotViolate( - app, - "/executor", - "{javax.validation.constraints.Pattern.message}" + app, + "/executor", + "{javax.validation.constraints.Pattern.message}" ) MarathonTestHelper.validateJsonSchema(app) app = correct.copy(executor = "some/relative/path.mte") shouldNotViolate( - app, - "/executor", - "{javax.validation.constraints.Pattern.message}" + app, + "/executor", + "{javax.validation.constraints.Pattern.message}" ) MarathonTestHelper.validateJsonSchema(app) app = correct.copy(executor = "/some/absolute/path") shouldNotViolate( - app, - "/executor", - "{javax.validation.constraints.Pattern.message}" + app, + "/executor", + "{javax.validation.constraints.Pattern.message}" ) MarathonTestHelper.validateJsonSchema(app) app = correct.copy(executor = "") shouldNotViolate( - app, - "/executor", - "{javax.validation.constraints.Pattern.message}" + app, + "/executor", + "{javax.validation.constraints.Pattern.message}" ) MarathonTestHelper.validateJsonSchema(app) app = correct.copy(executor = "/test/") shouldViolate( - app, - "/executor", - "must fully match regular expression '^(//cmd)|(/?[^/]+(/[^/]+)*)|$'" + app, + "/executor", + "must fully match regular expression '^(//cmd)|(/?[^/]+(/[^/]+)*)|$'" ) MarathonTestHelper.validateJsonSchema(app, false) app = correct.copy(executor = "/test//path") shouldViolate( - app, - "/executor", - "must fully match regular expression '^(//cmd)|(/?[^/]+(/[^/]+)*)|$'" + app, + "/executor", + "must fully match regular expression '^(//cmd)|(/?[^/]+(/[^/]+)*)|$'" ) MarathonTestHelper.validateJsonSchema(app, false) app = correct.copy(cmd = Some("command"), args = Some(Seq("a", "b", "c"))) shouldViolate( - app, - "/", - "AppDefinition must either contain one of 'cmd' or 'args', and/or a 'container'." + app, + "/", + "AppDefinition must either contain one of 'cmd' or 'args', and/or a 'container'." ) MarathonTestHelper.validateJsonSchema(app, false) app = correct.copy(cmd = None, args = Some(Seq("a", "b", "c"))) shouldNotViolate( - app, - "/", - "AppDefinition must either contain one of 'cmd' or 'args', and/or a 'container'." + app, + "/", + "AppDefinition must either contain one of 'cmd' or 'args', and/or a 'container'." 
) MarathonTestHelper.validateJsonSchema(app) app = correct.copy(upgradeStrategy = UpgradeStrategy(1.2)) shouldViolate( - app, - "/upgradeStrategy/minimumHealthCapacity", - "got 1.2, expected between 0.0 and 1.0" + app, + "/upgradeStrategy/minimumHealthCapacity", + "got 1.2, expected between 0.0 and 1.0" ) MarathonTestHelper.validateJsonSchema(app, false) app = correct.copy(upgradeStrategy = UpgradeStrategy(0.5, 1.2)) shouldViolate( - app, - "/upgradeStrategy/maximumOverCapacity", - "got 1.2, expected between 0.0 and 1.0" + app, + "/upgradeStrategy/maximumOverCapacity", + "got 1.2, expected between 0.0 and 1.0" ) MarathonTestHelper.validateJsonSchema(app, false) app = correct.copy(upgradeStrategy = UpgradeStrategy(-1.2)) shouldViolate( - app, - "/upgradeStrategy/minimumHealthCapacity", - "got -1.2, expected between 0.0 and 1.0" + app, + "/upgradeStrategy/minimumHealthCapacity", + "got -1.2, expected between 0.0 and 1.0" ) MarathonTestHelper.validateJsonSchema(app, false) app = correct.copy(upgradeStrategy = UpgradeStrategy(0.5, -1.2)) shouldViolate( - app, - "/upgradeStrategy/maximumOverCapacity", - "got -1.2, expected between 0.0 and 1.0" + app, + "/upgradeStrategy/maximumOverCapacity", + "got -1.2, expected between 0.0 and 1.0" ) MarathonTestHelper.validateJsonSchema(app, false) app = correct.copy( - container = Some( - Container( - docker = Some(Docker( - network = Some( - mesos.ContainerInfo.DockerInfo.Network.BRIDGE), - portMappings = Some(Seq( - Docker.PortMapping(8080, 0, 0, "tcp"), - Docker.PortMapping(8081, 0, 0, "tcp") - )) - )) - )), - portDefinitions = Nil, - healthChecks = Set(HealthCheck(portIndex = Some(1))) + container = Some( + Container( + docker = Some( + Docker( + network = Some(mesos.ContainerInfo.DockerInfo.Network.BRIDGE), + portMappings = Some( + Seq( + Docker.PortMapping(8080, 0, 0, "tcp"), + Docker.PortMapping(8081, 0, 0, "tcp") + )) + )) + )), + portDefinitions = Nil, + healthChecks = Set(HealthCheck(portIndex = Some(1))) ) shouldNotViolate( - app, - "/healthCecks(0)", - "Health check port indices must address an element of the ports array or container port mappings." + app, + "/healthCecks(0)", + "Health check port indices must address an element of the ports array or container port mappings." ) MarathonTestHelper.validateJsonSchema(app, false) // missing image app = correct.copy( - container = Some( - Container( - docker = Some(Docker( - network = Some( - mesos.ContainerInfo.DockerInfo.Network.BRIDGE), - portMappings = None - )) - )), - portDefinitions = Nil, - healthChecks = Set(HealthCheck(protocol = Protocol.COMMAND)) + container = Some( + Container( + docker = Some( + Docker( + network = Some(mesos.ContainerInfo.DockerInfo.Network.BRIDGE), + portMappings = None + )) + )), + portDefinitions = Nil, + healthChecks = Set(HealthCheck(protocol = Protocol.COMMAND)) ) shouldNotViolate( - app, - "/healthChecks(0)", - "Health check port indices must address an element of the ports array or container port mappings." + app, + "/healthChecks(0)", + "Health check port indices must address an element of the ports array or container port mappings." ) MarathonTestHelper.validateJsonSchema(app, false) // missing image app = correct.copy( - healthChecks = Set(HealthCheck(portIndex = Some(1))) + healthChecks = Set(HealthCheck(portIndex = Some(1))) ) shouldViolate( - app, - "/healthChecks(0)", - "Health check port indices must address an element of the ports array or container port mappings." 
+ app, + "/healthChecks(0)", + "Health check port indices must address an element of the ports array or container port mappings." ) MarathonTestHelper.validateJsonSchema(app) app = correct.copy( - fetch = Seq(FetchUri(uri = "http://example.com/valid"), - FetchUri(uri = "d://\not-a-uri")) + fetch = Seq(FetchUri(uri = "http://example.com/valid"), + FetchUri(uri = "d://\not-a-uri")) ) shouldViolate( - app, - "/fetch(1)", - "URI has invalid syntax." + app, + "/fetch(1)", + "URI has invalid syntax." ) MarathonTestHelper.validateJsonSchema(app) app = correct.copy( - fetch = Seq(FetchUri(uri = "http://example.com/valid"), - FetchUri(uri = "/root/file")) + fetch = Seq(FetchUri(uri = "http://example.com/valid"), + FetchUri(uri = "/root/file")) ) shouldNotViolate(app, "/fetch(1)", "URI has invalid syntax.") - shouldViolate( - app.copy(mem = -3.0), "/mem", "got -3.0, expected 0.0 or more") - shouldViolate( - app.copy(cpus = -3.0), "/cpus", "got -3.0, expected 0.0 or more") - shouldViolate( - app.copy(disk = -3.0), "/disk", "got -3.0, expected 0.0 or more") - shouldViolate( - app.copy(instances = -3), "/instances", "got -3, expected 0 or more") + shouldViolate(app.copy(mem = -3.0), + "/mem", + "got -3.0, expected 0.0 or more") + shouldViolate(app.copy(cpus = -3.0), + "/cpus", + "got -3.0, expected 0.0 or more") + shouldViolate(app.copy(disk = -3.0), + "/disk", + "got -3.0, expected 0.0 or more") + shouldViolate(app.copy(instances = -3), + "/instances", + "got -3, expected 0 or more") } test("SerializationRoundtrip empty") { @@ -389,34 +394,34 @@ class AppDefinitionTest extends MarathonSpec with Matchers { import Formats._ val app3 = AppDefinition( - id = PathId("/prod/product/frontend/my-app"), - cmd = Some("sleep 30"), - user = Some("nobody"), - env = Map("key1" -> "value1", "key2" -> "value2"), - instances = 5, - cpus = 5.0, - mem = 55.0, - disk = 550.0, - executor = "", - constraints = Set( - Constraint.newBuilder - .setField("attribute") - .setOperator(Constraint.Operator.GROUP_BY) - .setValue("value") - .build - ), - storeUrls = Seq("http://my.org.com/artifacts/foo.bar"), - portDefinitions = PortDefinitions(9001, 9002), - requirePorts = true, - backoff = 5.seconds, - backoffFactor = 1.5, - maxLaunchDelay = 3.minutes, - container = Some( - Container(docker = Some(Container.Docker("group/image"))) - ), - healthChecks = Set(HealthCheck(portIndex = Some(0))), - dependencies = Set(PathId("/prod/product/backend")), - upgradeStrategy = UpgradeStrategy(minimumHealthCapacity = 0.75) + id = PathId("/prod/product/frontend/my-app"), + cmd = Some("sleep 30"), + user = Some("nobody"), + env = Map("key1" -> "value1", "key2" -> "value2"), + instances = 5, + cpus = 5.0, + mem = 55.0, + disk = 550.0, + executor = "", + constraints = Set( + Constraint.newBuilder + .setField("attribute") + .setOperator(Constraint.Operator.GROUP_BY) + .setValue("value") + .build + ), + storeUrls = Seq("http://my.org.com/artifacts/foo.bar"), + portDefinitions = PortDefinitions(9001, 9002), + requirePorts = true, + backoff = 5.seconds, + backoffFactor = 1.5, + maxLaunchDelay = 3.minutes, + container = Some( + Container(docker = Some(Container.Docker("group/image"))) + ), + healthChecks = Set(HealthCheck(portIndex = Some(0))), + dependencies = Set(PathId("/prod/product/backend")), + upgradeStrategy = UpgradeStrategy(minimumHealthCapacity = 0.75) ) JsonTestHelper.assertSerializationRoundtripWorks(app3) } @@ -425,10 +430,10 @@ class AppDefinitionTest extends MarathonSpec with Matchers { import Formats._ val app3 = AppDefinition( - id = 
PathId("/prod/product/frontend/my-app"), - cmd = Some("sleep 30"), - portDefinitions = PortDefinitions(9001, 9002), - healthChecks = Set(HealthCheck(portIndex = Some(1))) + id = PathId("/prod/product/frontend/my-app"), + cmd = Some("sleep 30"), + portDefinitions = PortDefinitions(9001, 9002), + healthChecks = Set(HealthCheck(portIndex = Some(1))) ) JsonTestHelper.assertSerializationRoundtripWorks(app3) } @@ -437,10 +442,10 @@ class AppDefinitionTest extends MarathonSpec with Matchers { import Formats._ val app = AppDefinition( - id = PathId("/prod/product/frontend/my-app"), - cmd = Some("sleep 30"), - portDefinitions = PortDefinitions(9001, 9002), - healthChecks = Set(HealthCheck()) + id = PathId("/prod/product/frontend/my-app"), + cmd = Some("sleep 30"), + portDefinitions = PortDefinitions(9001, 9002), + healthChecks = Set(HealthCheck()) ) val json = Json.toJson(app) @@ -454,10 +459,10 @@ class AppDefinitionTest extends MarathonSpec with Matchers { import Formats._ val app = AppDefinition( - id = PathId("/prod/product/frontend/my-app"), - cmd = Some("sleep 30"), - portDefinitions = Seq.empty, - healthChecks = Set(HealthCheck()) + id = PathId("/prod/product/frontend/my-app"), + cmd = Some("sleep 30"), + portDefinitions = Seq.empty, + healthChecks = Set(HealthCheck()) ) val json = Json.toJson(app) @@ -468,25 +473,25 @@ class AppDefinitionTest extends MarathonSpec with Matchers { } test( - "Reading AppDefinition adds portIndex if you have at least one portMapping") { + "Reading AppDefinition adds portIndex if you have at least one portMapping") { import Formats._ val app = AppDefinition( - id = PathId("/prod/product/frontend/my-app"), - cmd = Some("sleep 30"), - portDefinitions = Seq.empty, - container = Some( - Container( - docker = Some( - Docker( - portMappings = Some( - Seq(Docker.PortMapping()) - ) - ) - ) + id = PathId("/prod/product/frontend/my-app"), + cmd = Some("sleep 30"), + portDefinitions = Seq.empty, + container = Some( + Container( + docker = Some( + Docker( + portMappings = Some( + Seq(Docker.PortMapping()) ) - ), - healthChecks = Set(HealthCheck()) + ) + ) + ) + ), + healthChecks = Set(HealthCheck()) ) val json = Json.toJson(app) @@ -497,23 +502,23 @@ class AppDefinitionTest extends MarathonSpec with Matchers { } test( - "Reading AppDefinition does not add portIndex if there are no ports nor portMappings") { + "Reading AppDefinition does not add portIndex if there are no ports nor portMappings") { import Formats._ val app = AppDefinition( - id = PathId("/prod/product/frontend/my-app"), - cmd = Some("sleep 30"), - portDefinitions = Seq.empty, - container = Some( - Container( - docker = Some( - Docker( - portMappings = Some(Seq.empty) - ) - ) - ) - ), - healthChecks = Set(HealthCheck()) + id = PathId("/prod/product/frontend/my-app"), + cmd = Some("sleep 30"), + portDefinitions = Seq.empty, + container = Some( + Container( + docker = Some( + Docker( + portMappings = Some(Seq.empty) + ) + ) + ) + ), + healthChecks = Set(HealthCheck()) ) val json = Json.toJson(app) @@ -528,21 +533,23 @@ class AppDefinitionTest extends MarathonSpec with Matchers { import org.apache.mesos.Protos.ContainerInfo.DockerInfo.Network val app4 = AppDefinition( - id = "bridged-webapp".toPath, - cmd = Some("python3 -m http.server 8080"), - container = Some( - Container( - docker = Some(Docker( - image = "python:3", - network = Some(Network.BRIDGE), - portMappings = Some(Seq( - PortMapping(containerPort = 8080, - hostPort = 0, - servicePort = 9000, - protocol = "tcp") - )) - )) - )) + id = 
"bridged-webapp".toPath, + cmd = Some("python3 -m http.server 8080"), + container = Some( + Container( + docker = Some( + Docker( + image = "python:3", + network = Some(Network.BRIDGE), + portMappings = Some( + Seq( + PortMapping(containerPort = 8080, + hostPort = 0, + servicePort = 9000, + protocol = "tcp") + )) + )) + )) ) val json4 = @@ -570,18 +577,18 @@ class AppDefinitionTest extends MarathonSpec with Matchers { test("Read app with fetch definition") { val app = AppDefinition( - id = "app-with-fetch".toPath, - cmd = Some("brew update"), - fetch = Seq( - new FetchUri(uri = "http://example.com/file1", - executable = false, - extract = true, - cache = true), - new FetchUri(uri = "http://example.com/file2", - executable = true, - extract = false, - cache = false) - ) + id = "app-with-fetch".toPath, + cmd = Some("brew update"), + fetch = Seq( + new FetchUri(uri = "http://example.com/file1", + executable = false, + extract = true, + cache = true), + new FetchUri(uri = "http://example.com/file2", + executable = true, + extract = false, + cache = false) + ) ) val json = """ @@ -629,18 +636,18 @@ class AppDefinitionTest extends MarathonSpec with Matchers { test("Serialize deserialize path with fetch") { val app = AppDefinition( - id = "app-with-fetch".toPath, - cmd = Some("brew update"), - fetch = Seq( - new FetchUri(uri = "http://example.com/file1", - executable = false, - extract = true, - cache = true), - new FetchUri(uri = "http://example.com/file2", - executable = true, - extract = false, - cache = false) - ) + id = "app-with-fetch".toPath, + cmd = Some("brew update"), + fetch = Seq( + new FetchUri(uri = "http://example.com/file1", + executable = false, + extract = true, + cache = true), + new FetchUri(uri = "http://example.com/file2", + executable = true, + extract = false, + cache = false) + ) ) val proto = app.toProto @@ -660,23 +667,21 @@ class AppDefinitionTest extends MarathonSpec with Matchers { test("Read app with ip address and discovery info") { val app = AppDefinition( - id = "app-with-ip-address".toPath, - cmd = Some("python3 -m http.server 8080"), - portDefinitions = Nil, - ipAddress = Some( - IpAddress( - groups = Seq("a", "b", "c"), - labels = Map( - "foo" -> "bar", - "baz" -> "buzz" - ), - discoveryInfo = DiscoveryInfo( - ports = Seq(Port(name = "http", - number = 80, - protocol = "tcp")) - ) - )), - maxLaunchDelay = 3600.seconds + id = "app-with-ip-address".toPath, + cmd = Some("python3 -m http.server 8080"), + portDefinitions = Nil, + ipAddress = Some( + IpAddress( + groups = Seq("a", "b", "c"), + labels = Map( + "foo" -> "bar", + "baz" -> "buzz" + ), + discoveryInfo = DiscoveryInfo( + ports = Seq(Port(name = "http", number = 80, protocol = "tcp")) + ) + )), + maxLaunchDelay = 3600.seconds ) val json = """ @@ -706,19 +711,19 @@ class AppDefinitionTest extends MarathonSpec with Matchers { test("Read app with ip address without discovery info") { val app = AppDefinition( - id = "app-with-ip-address".toPath, - cmd = Some("python3 -m http.server 8080"), - portDefinitions = Nil, - ipAddress = Some( - IpAddress( - groups = Seq("a", "b", "c"), - labels = Map( - "foo" -> "bar", - "baz" -> "buzz" - ), - discoveryInfo = DiscoveryInfo.empty - )), - maxLaunchDelay = 3600.seconds + id = "app-with-ip-address".toPath, + cmd = Some("python3 -m http.server 8080"), + portDefinitions = Nil, + ipAddress = Some( + IpAddress( + groups = Seq("a", "b", "c"), + labels = Map( + "foo" -> "bar", + "baz" -> "buzz" + ), + discoveryInfo = DiscoveryInfo.empty + )), + maxLaunchDelay = 3600.seconds ) 
val json = """ @@ -742,10 +747,10 @@ class AppDefinitionTest extends MarathonSpec with Matchers { test("Read app with ip address and an empty ports list") { val app = AppDefinition( - id = "app-with-network-isolation".toPath, - cmd = Some("python3 -m http.server 8080"), - portDefinitions = Nil, - ipAddress = Some(IpAddress()) + id = "app-with-network-isolation".toPath, + cmd = Some("python3 -m http.server 8080"), + portDefinitions = Nil, + ipAddress = Some(IpAddress()) ) val json = """ @@ -780,8 +785,9 @@ class AppDefinitionTest extends MarathonSpec with Matchers { import Formats._ val result = Json.fromJson[AppDefinition](Json.parse(json)) - assert(result == JsError(ValidationError( - "You cannot specify both an IP address and ports"))) + assert( + result == JsError( + ValidationError("You cannot specify both an IP address and ports"))) } test("App may not have both uris and fetch") { @@ -795,16 +801,18 @@ class AppDefinitionTest extends MarathonSpec with Matchers { import Formats._ val result = Json.fromJson[AppDefinition](Json.parse(json)) - assert(result == JsError( - ValidationError("You cannot specify both uris and fetch fields"))) + assert( + result == JsError( + ValidationError("You cannot specify both uris and fetch fields"))) } test("Residency serialization (toProto) and deserialization (fromProto)") { val app = AppDefinition( - id = "/test".toRootPath, - residency = Some(Residency( - relaunchEscalationTimeoutSeconds = 3600, - taskLostBehavior = Protos.ResidencyDefinition.TaskLostBehavior.WAIT_FOREVER))) + id = "/test".toRootPath, + residency = Some( + Residency(relaunchEscalationTimeoutSeconds = 3600, + taskLostBehavior = + Protos.ResidencyDefinition.TaskLostBehavior.WAIT_FOREVER))) val proto = app.toProto proto.hasResidency shouldBe true diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/core/appinfo/TaskStatsByVersionTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/core/appinfo/TaskStatsByVersionTest.scala index fbd32ceb053..92bcc54517c 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/core/appinfo/TaskStatsByVersionTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/core/appinfo/TaskStatsByVersionTest.scala @@ -111,7 +111,8 @@ class TaskStatsByVersionTest version: Timestamp, startingDelay: FiniteDuration): Task = { val startedAt = (version + startingDelay).toDateTime.getMillis - MarathonTestHelper - .runningTask(newTaskId(), appVersion = version, startedAt = startedAt) + MarathonTestHelper.runningTask(newTaskId(), + appVersion = version, + startedAt = startedAt) } } diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/core/launcher/impl/OfferProcessorImplTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/core/launcher/impl/OfferProcessorImplTest.scala index f386bfa057b..8ff55f88fe5 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/core/launcher/impl/OfferProcessorImplTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/core/launcher/impl/OfferProcessorImplTest.scala @@ -233,8 +233,7 @@ class OfferProcessorImplTest val deadline: Timestamp = clock.now() + 1.second And("a cooperative taskLauncher") - taskLauncher - .acceptOffer(offerId, tasksWithSource.map(_.op).take(1)) returns true + taskLauncher.acceptOffer(offerId, tasksWithSource.map(_.op).take(1)) returns true And("a cooperative offerMatcher") offerMatcher.matchOffer(deadline, offer) returns Future.successful( diff --git 
a/repos/marathon/src/test/scala/mesosphere/marathon/core/launchqueue/impl/AppTaskLauncherActorTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/core/launchqueue/impl/AppTaskLauncherActorTest.scala index 366e2d40c0f..66d507f7bce 100644 --- a/repos/marathon/src/test/scala/mesosphere/marathon/core/launchqueue/impl/AppTaskLauncherActorTest.scala +++ b/repos/marathon/src/test/scala/mesosphere/marathon/core/launchqueue/impl/AppTaskLauncherActorTest.scala @@ -170,9 +170,10 @@ class AppTaskLauncherActorTest extends MarathonSpec with GivenWhenThen { launcherRef ! RateLimiterActor.DelayUpdate(app, clock.now()) Await - .result(launcherRef ? ActorOfferMatcher - .MatchOffer(clock.now() + 1.seconds, offer), - 3.seconds) + .result( + launcherRef ? ActorOfferMatcher.MatchOffer(clock.now() + 1.seconds, + offer), + 3.seconds) .asInstanceOf[MatchedTaskOps] val counts = Await @@ -204,9 +205,10 @@ class AppTaskLauncherActorTest extends MarathonSpec with GivenWhenThen { launcherRef ! RateLimiterActor.DelayUpdate(app, clock.now()) val matched = Await - .result(launcherRef ? ActorOfferMatcher - .MatchOffer(clock.now() + 1.seconds, offer), - 3.seconds) + .result( + launcherRef ? ActorOfferMatcher.MatchOffer(clock.now() + 1.seconds, + offer), + 3.seconds) .asInstanceOf[MatchedTaskOps] val testProbe = TestProbe() @@ -236,9 +238,10 @@ class AppTaskLauncherActorTest extends MarathonSpec with GivenWhenThen { launcherRef ! RateLimiterActor.DelayUpdate(app, clock.now()) val matchedTasks = Await - .result(launcherRef ? ActorOfferMatcher - .MatchOffer(clock.now() + 1.seconds, offer), - 3.seconds) + .result( + launcherRef ? ActorOfferMatcher.MatchOffer(clock.now() + 1.seconds, + offer), + 3.seconds) .asInstanceOf[MatchedTaskOps] matchedTasks.opsWithSource.foreach(_.reject("stuff")) @@ -293,17 +296,19 @@ class AppTaskLauncherActorTest extends MarathonSpec with GivenWhenThen { launcherRef ! RateLimiterActor.DelayUpdate(app, clock.now()) Await - .result(launcherRef ? ActorOfferMatcher - .MatchOffer(clock.now() + 1.seconds, offer), - 3.seconds) + .result( + launcherRef ? ActorOfferMatcher.MatchOffer(clock.now() + 1.seconds, + offer), + 3.seconds) .asInstanceOf[MatchedTaskOps] // just make sure that prior messages have been processed, will not launch further tasks Await - .result(launcherRef ? ActorOfferMatcher - .MatchOffer(clock.now() + 1.seconds, offer), - 3.seconds) + .result( + launcherRef ? ActorOfferMatcher.MatchOffer(clock.now() + 1.seconds, + offer), + 3.seconds) .asInstanceOf[MatchedTaskOps] assert(scheduleCalled) @@ -327,9 +332,10 @@ class AppTaskLauncherActorTest extends MarathonSpec with GivenWhenThen { launcherRef ! RateLimiterActor.DelayUpdate(app, clock.now()) val matchedTasks = Await - .result(launcherRef ? ActorOfferMatcher - .MatchOffer(clock.now() + 1.seconds, offer), - 3.seconds) + .result( + launcherRef ? 
+            ActorOfferMatcher.MatchOffer(clock.now() + 1.seconds,
+                                         offer),
+        3.seconds)
        .asInstanceOf[MatchedTaskOps]
     matchedTasks.opsWithSource.foreach(_.accept())
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskOpProcessorImplTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskOpProcessorImplTest.scala
index f7bd89fabe6..8f811e04969 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskOpProcessorImplTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskOpProcessorImplTest.scala
@@ -302,8 +302,10 @@ class TaskOpProcessorImplTest
 
     When("the processor processes an update")
     val result = f.processor.process(
-      TaskOpProcessor
-        .Operation(deadline, testActor, taskId, TaskOpProcessor.Action.Expunge)
+      TaskOpProcessor.Operation(deadline,
+                                testActor,
+                                taskId,
+                                TaskOpProcessor.Action.Expunge)
     )
 
     Then("it replies with unit immediately")
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskTrackerActorTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskTrackerActorTest.scala
index b568eb96b3c..bccc0e90084 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskTrackerActorTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/core/task/tracker/impl/TaskTrackerActorTest.scala
@@ -172,9 +172,10 @@ class TaskTrackerActorTest
     val stagedTaskNowRunning =
       MarathonTestHelper.runningTaskProto(stagedTask.getId)
     val taskState = TaskSerializer.fromProto(stagedTaskNowRunning)
-    probe.send(f.taskTrackerActor,
-               TaskTrackerActor
-                 .TaskUpdated(taskState, TaskTrackerActor.Ack(probe.ref, ())))
+    probe.send(
+      f.taskTrackerActor,
+      TaskTrackerActor.TaskUpdated(taskState,
+                                   TaskTrackerActor.Ack(probe.ref, ())))
     probe.expectMsg(())
 
     Then("it will have set the correct metric counts")
@@ -199,9 +200,10 @@ class TaskTrackerActorTest
     val probe = TestProbe()
     val newTask = MarathonTestHelper.stagedTaskProto(appId)
     val taskState = TaskSerializer.fromProto(newTask)
-    probe.send(f.taskTrackerActor,
-               TaskTrackerActor
-                 .TaskUpdated(taskState, TaskTrackerActor.Ack(probe.ref, ())))
+    probe.send(
+      f.taskTrackerActor,
+      TaskTrackerActor.TaskUpdated(taskState,
+                                   TaskTrackerActor.Ack(probe.ref, ())))
     probe.expectMsg(())
 
     Then("it will have set the correct metric counts")
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/core/task/update/impl/TaskStatusUpdateProcessorImplTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/core/task/update/impl/TaskStatusUpdateProcessorImplTest.scala
index af9f20181a3..7860eaf1f36 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/core/task/update/impl/TaskStatusUpdateProcessorImplTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/core/task/update/impl/TaskStatusUpdateProcessorImplTest.scala
@@ -196,8 +196,8 @@ class TaskStatusUpdateProcessorImplTest
   lazy val task = MarathonTestHelper
     .makeOneCPUTask(Task.Id.forApp(appId).mesosTaskId.getValue)
     .build()
-  lazy val taskState = MarathonTestHelper
-    .stagedTask(task.getTaskId.getValue, appVersion = version)
+  lazy val taskState = MarathonTestHelper.stagedTask(task.getTaskId.getValue,
+                                                     appVersion = version)
   lazy val marathonTask = taskState.marathonTask
 
   after {
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/integration/GroupDeployIntegrationTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/integration/GroupDeployIntegrationTest.scala
index 8d7b4240be0..a60fa0bd5e9 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/integration/GroupDeployIntegrationTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/integration/GroupDeployIntegrationTest.scala
@@ -152,8 +152,9 @@ class GroupDeployIntegrationTest
     When("The group is updated")
     check.afterDelay(1.second, state = false)
    check.afterDelay(3.seconds, state = true)
-    val update = marathon
-      .updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v2", 1)))))
+    val update = marathon.updateGroup(
+      id,
+      group.copy(apps = Some(Set(appProxy(appId, "v2", 1)))))
 
     Then("A success event is send and the application has been started")
     waitForChange(update)
@@ -208,8 +209,9 @@ class GroupDeployIntegrationTest
 
     When("The new application is not healthy")
     val v2Check = appProxyCheck(appId, "v2", state = false) //will always fail
-    val update = marathon
-      .updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v2", 2)))))
+    val update = marathon.updateGroup(
+      id,
+      group.copy(apps = Some(Set(appProxy(appId, "v2", 2)))))
 
     Then("All v1 applications are kept alive")
     v1Check.healthy
@@ -231,12 +233,14 @@ class GroupDeployIntegrationTest
     val create = marathon.createGroup(group)
     waitForChange(create)
     appProxyCheck(appId, "v2", state = false) //will always fail
-    marathon
-      .updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v2", 2)))))
+    marathon.updateGroup(
+      id,
+      group.copy(apps = Some(Set(appProxy(appId, "v2", 2)))))
 
     When("Another upgrade is triggered, while the old one is not completed")
-    val result = marathon
-      .updateGroup(id, group.copy(apps = Some(Set(appProxy(appId, "v3", 2)))))
+    val result = marathon.updateGroup(
+      id,
+      group.copy(apps = Some(Set(appProxy(appId, "v3", 2)))))
 
     Then("An error is indicated")
     result.code should be(HttpStatus.SC_CONFLICT)
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/tasks/ResourceUtilTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/tasks/ResourceUtilTest.scala
index 99ff2d97974..65f4360c3f0 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/tasks/ResourceUtilTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/tasks/ResourceUtilTest.scala
@@ -65,8 +65,11 @@ class ResourceUtilTest
       .newBuilder()
       .setPersistence(Persistence.newBuilder().setId("persistenceId"))
       .build()
-    val resourceWithReservation = MTH
-      .scalarResource("disk", 1024, "role", Some(reservationInfo), Some(disk))
+    val resourceWithReservation = MTH.scalarResource("disk",
+                                                     1024,
+                                                     "role",
+                                                     Some(reservationInfo),
+                                                     Some(disk))
 
     val resourceWithoutReservation =
       MTH.scalarResource("disk", 1024, "role", None, None)
@@ -198,8 +201,11 @@ class ResourceUtilTest
       .newBuilder()
       .setPersistence(Persistence.newBuilder().setId("persistenceId"))
       .build()
-    val resource = MTH
-      .scalarResource("disk", 1024, "role", Some(reservationInfo), Some(disk))
+    val resource = MTH.scalarResource("disk",
+                                      1024,
+                                      "role",
+                                      Some(reservationInfo),
+                                      Some(disk))
     val resourceString =
       ResourceUtil.displayResources(Seq(resource), maxRanges = 10)
     resourceString should equal(
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/tasks/TaskOpFactoryImplTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/tasks/TaskOpFactoryImplTest.scala
index 9e8db1599b2..644e375a5de 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/tasks/TaskOpFactoryImplTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/tasks/TaskOpFactoryImplTest.scala
@@ -201,8 +201,8 @@ class TaskOpFactoryImplTest
   class Fixture {
     import mesosphere.marathon.{MarathonTestHelper => MTH}
     val taskTracker = mock[TaskTracker]
-    val config: MarathonConf = MTH
-      .defaultConfig(mesosRole = Some("test"), principal = Some("principal"))
+    val config: MarathonConf = MTH.defaultConfig(mesosRole = Some("test"),
+                                                 principal = Some("principal"))
     val clock = ConstantClock()
     val taskOpFactory: TaskOpFactory =
       new TaskOpFactoryImpl(config, clock)
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/DeploymentActorTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/DeploymentActorTest.scala
index c7aeeb06230..c6333ba2c98 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/DeploymentActorTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/DeploymentActorTest.scala
@@ -72,10 +72,12 @@ class DeploymentActorTest
     val targetGroup = Group(PathId("/foo/bar"), Set(app1New, app2New, app3))
 
     // setting started at to 0 to make sure this survives
-    val task1_1 = MarathonTestHelper
-      .runningTask("task1_1", appVersion = app1.version, startedAt = 0)
-    val task1_2 = MarathonTestHelper
-      .runningTask("task1_2", appVersion = app1.version, startedAt = 1000)
+    val task1_1 = MarathonTestHelper.runningTask("task1_1",
+                                                 appVersion = app1.version,
+                                                 startedAt = 0)
+    val task1_2 = MarathonTestHelper.runningTask("task1_2",
+                                                 appVersion = app1.version,
+                                                 startedAt = 1000)
     val task2_1 =
       MarathonTestHelper.runningTask("task2_1", appVersion = app2.version)
     val task3_1 =
@@ -231,10 +233,12 @@ class DeploymentActorTest
 
     val targetGroup = Group(PathId("/foo/bar"), Set(appNew))
 
-    val task1_1 = MarathonTestHelper
-      .runningTask("task1_1", appVersion = app.version, startedAt = 0)
-    val task1_2 = MarathonTestHelper
-      .runningTask("task1_2", appVersion = app.version, startedAt = 1000)
+    val task1_1 = MarathonTestHelper.runningTask("task1_1",
+                                                 appVersion = app.version,
+                                                 startedAt = 0)
+    val task1_2 = MarathonTestHelper.runningTask("task1_2",
+                                                 appVersion = app.version,
+                                                 startedAt = 1000)
 
     when(tracker.appTasksLaunchedSync(app.id))
       .thenReturn(Set(task1_1, task1_2))
@@ -386,12 +390,15 @@ class DeploymentActorTest
 
     val targetGroup = Group(PathId("/foo/bar"), Set(app1New))
 
-    val task1_1 = MarathonTestHelper
-      .runningTask("task1_1", appVersion = app1.version, startedAt = 0)
-    val task1_2 = MarathonTestHelper
-      .runningTask("task1_2", appVersion = app1.version, startedAt = 500)
-    val task1_3 = MarathonTestHelper
-      .runningTask("task1_3", appVersion = app1.version, startedAt = 1000)
+    val task1_1 = MarathonTestHelper.runningTask("task1_1",
+                                                 appVersion = app1.version,
+                                                 startedAt = 0)
+    val task1_2 = MarathonTestHelper.runningTask("task1_2",
+                                                 appVersion = app1.version,
+                                                 startedAt = 500)
+    val task1_3 = MarathonTestHelper.runningTask("task1_3",
+                                                 appVersion = app1.version,
+                                                 startedAt = 1000)
 
     val plan = DeploymentPlan(original = origGroup,
                               target = targetGroup,
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/DeploymentPlanRevertTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/DeploymentPlanRevertTest.scala
index 84296e0dfe8..fcd7921a152 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/DeploymentPlanRevertTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/DeploymentPlanRevertTest.scala
@@ -118,8 +118,9 @@ class DeploymentPlanRevertTest
     When("we remove an app and try to revert that without concurrent changes")
     val appId = "/changeme/app1".toRootPath
-    val target = original
-      .update(appId.parent, _.removeApplication(appId), Timestamp.now())
+    val target = original.update(appId.parent,
+                                 _.removeApplication(appId),
+                                 Timestamp.now())
     target.app(appId) should be('empty)
     val plan = DeploymentPlan(original, target)
     val revertToOriginal = plan.revert(target)
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/GroupVersioningUtilTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/GroupVersioningUtilTest.scala
index ce086575abe..d954f3090ab 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/GroupVersioningUtilTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/GroupVersioningUtilTest.scala
@@ -54,24 +54,30 @@ class GroupVersioningUtilTest
 
   test("No changes for empty group") {
     When("Calculating version infos for an empty group")
-    val updated = GroupVersioningUtil
-      .updateVersionInfoForChangedApps(Timestamp(10), emptyGroup, emptyGroup)
+    val updated =
+      GroupVersioningUtil.updateVersionInfoForChangedApps(Timestamp(10),
+                                                          emptyGroup,
+                                                          emptyGroup)
     Then("nothing is changed")
     updated should be(emptyGroup)
   }
 
   test("No changes for nested app") {
     When("Calculating version infos with no changes")
-    val updated = GroupVersioningUtil
-      .updateVersionInfoForChangedApps(Timestamp(10), nestedApp, nestedApp)
+    val updated =
+      GroupVersioningUtil.updateVersionInfoForChangedApps(Timestamp(10),
+                                                          nestedApp,
+                                                          nestedApp)
     Then("nothing is changed")
     updated should be(nestedApp)
   }
 
   test("A new app should get proper versionInfo") {
     When("Calculating version infos with an added app")
-    val updated = GroupVersioningUtil
-      .updateVersionInfoForChangedApps(Timestamp(10), emptyGroup, nestedApp)
+    val updated =
+      GroupVersioningUtil.updateVersionInfoForChangedApps(Timestamp(10),
+                                                          emptyGroup,
+                                                          nestedApp)
     Then("The timestamp of the app and groups are updated appropriately")
     def update(maybeApp: Option[AppDefinition]): AppDefinition =
       maybeApp
diff --git a/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/TaskStartActorTest.scala b/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/TaskStartActorTest.scala
index 65a47899e71..23049e32e90 100644
--- a/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/TaskStartActorTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/marathon/upgrade/TaskStartActorTest.scala
@@ -153,8 +153,9 @@ class TaskStartActorTest
     val app = AppDefinition("/myApp".toPath, instances = 5)
 
     when(launchQueue.get(app.id)).thenReturn(None)
-    val task = MarathonTestHelper
-      .startingTaskForApp(app.id, appVersion = Timestamp(1024))
+    val task = MarathonTestHelper.startingTaskForApp(app.id,
+                                                     appVersion =
+                                                       Timestamp(1024))
     taskCreationHandler.created(task).futureValue
 
     val ref = TestActorRef(
diff --git a/repos/marathon/src/test/scala/mesosphere/mesos/ConstraintsTest.scala b/repos/marathon/src/test/scala/mesosphere/mesos/ConstraintsTest.scala
index 63b9bef6b94..5c26033620f 100644
--- a/repos/marathon/src/test/scala/mesosphere/mesos/ConstraintsTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/mesos/ConstraintsTest.scala
@@ -196,8 +196,10 @@ class ConstraintsTest extends MarathonSpec with GivenWhenThen with Matchers {
 
     assert(!clusterRackNotMet, "Should not meet cluster constraint.")
 
-    val clusterNoAttributeNotMet = Constraints
-      .meetsConstraint(freshRack, makeOffer("foohost", Set()), clusterByRackId)
+    val clusterNoAttributeNotMet =
+      Constraints.meetsConstraint(freshRack,
+                                  makeOffer("foohost", Set()),
+                                  clusterByRackId)
 
     assert(!clusterNoAttributeNotMet, "Should not meet cluster constraint.")
@@ -228,8 +230,10 @@ class ConstraintsTest extends MarathonSpec with GivenWhenThen with Matchers {
 
     assert(!uniqueRackNotMet, "Should not meet unique constraint for rack.")
 
-    val uniqueNoAttributeNotMet = Constraints
-      .meetsConstraint(freshRack, makeOffer("foohost", Set()), uniqueRackId)
+    val uniqueNoAttributeNotMet =
+      Constraints.meetsConstraint(freshRack,
+                                  makeOffer("foohost", Set()),
+                                  uniqueRackId)
 
     assert(!uniqueNoAttributeNotMet, "Should not meet unique constraint.")
   }
@@ -349,8 +353,10 @@ class ConstraintsTest extends MarathonSpec with GivenWhenThen with Matchers {
 
     assert(!groupByRackNotMet, "Should not meet group-by-rack constraint.")
 
-    val groupByNoAttributeNotMet = Constraints
-      .meetsConstraint(sameRack, makeOffer("foohost", Set()), group2ByRack)
+    val groupByNoAttributeNotMet =
+      Constraints.meetsConstraint(sameRack,
+                                  makeOffer("foohost", Set()),
+                                  group2ByRack)
     assert(!groupByNoAttributeNotMet,
            "Should not meet group-by-no-attribute constraints.")
   }
@@ -433,56 +439,74 @@ class ConstraintsTest extends MarathonSpec with GivenWhenThen with Matchers {
     val groupByHost =
       makeConstraint("hostname", Constraint.Operator.GROUP_BY, "2")
 
-    val groupByFreshHostMet = Constraints
-      .meetsConstraint(groupHost, makeOffer("host1", attributes), groupByHost)
+    val groupByFreshHostMet =
+      Constraints.meetsConstraint(groupHost,
+                                  makeOffer("host1", attributes),
+                                  groupByHost)
 
     assert(groupByFreshHostMet, "Should be able to schedule in fresh host.")
 
     groupHost ++= Set(task1_host1)
 
-    val groupByHostMet = Constraints
-      .meetsConstraint(groupHost, makeOffer("host1", attributes), groupByHost)
+    val groupByHostMet =
+      Constraints.meetsConstraint(groupHost,
+                                  makeOffer("host1", attributes),
+                                  groupByHost)
 
     assert(!groupByHostMet, "Should not meet group-by-host constraint.")
 
-    val groupByHostMet2 = Constraints
-      .meetsConstraint(groupHost, makeOffer("host2", attributes), groupByHost)
+    val groupByHostMet2 =
+      Constraints.meetsConstraint(groupHost,
+                                  makeOffer("host2", attributes),
+                                  groupByHost)
 
     assert(groupByHostMet2, "Should meet group-by-host constraint.")
 
     groupHost ++= Set(task3_host2)
 
-    val groupByHostMet3 = Constraints
-      .meetsConstraint(groupHost, makeOffer("host1", attributes), groupByHost)
+    val groupByHostMet3 =
+      Constraints.meetsConstraint(groupHost,
+                                  makeOffer("host1", attributes),
+                                  groupByHost)
 
     assert(groupByHostMet3, "Should meet group-by-host constraint.")
 
     groupHost ++= Set(task2_host1)
 
-    val groupByHostNotMet = Constraints
-      .meetsConstraint(groupHost, makeOffer("host1", attributes), groupByHost)
+    val groupByHostNotMet =
+      Constraints.meetsConstraint(groupHost,
+                                  makeOffer("host1", attributes),
+                                  groupByHost)
 
     assert(!groupByHostNotMet, "Should not meet group-by-host constraint.")
 
-    val groupByHostMet4 = Constraints
-      .meetsConstraint(groupHost, makeOffer("host3", attributes), groupByHost)
+    val groupByHostMet4 =
+      Constraints.meetsConstraint(groupHost,
+                                  makeOffer("host3", attributes),
+                                  groupByHost)
 
     assert(groupByHostMet4, "Should meet group-by-host constraint.")
 
     groupHost ++= Set(task4_host3)
 
-    val groupByHostNotMet2 = Constraints
-      .meetsConstraint(groupHost, makeOffer("host1", attributes), groupByHost)
+    val groupByHostNotMet2 =
+      Constraints.meetsConstraint(groupHost,
+                                  makeOffer("host1", attributes),
+                                  groupByHost)
 
     assert(!groupByHostNotMet2, "Should not meet group-by-host constraint.")
 
-    val groupByHostMet5 = Constraints
-      .meetsConstraint(groupHost, makeOffer("host3", attributes), groupByHost)
+    val groupByHostMet5 =
+      Constraints.meetsConstraint(groupHost,
+                                  makeOffer("host3", attributes),
+                                  groupByHost)
 
     assert(groupByHostMet5, "Should meet group-by-host constraint.")
 
-    val groupByHostMet6 = Constraints
-      .meetsConstraint(groupHost, makeOffer("host2", attributes), groupByHost)
+    val groupByHostMet6 =
+      Constraints.meetsConstraint(groupHost,
+                                  makeOffer("host2", attributes),
+                                  groupByHost)
 
     assert(groupByHostMet6, "Should meet group-by-host constraint.")
   }
diff --git a/repos/marathon/src/test/scala/mesosphere/mesos/ResourceMatcherTest.scala b/repos/marathon/src/test/scala/mesosphere/mesos/ResourceMatcherTest.scala
index 81b47111204..d90125ede10 100644
--- a/repos/marathon/src/test/scala/mesosphere/mesos/ResourceMatcherTest.scala
+++ b/repos/marathon/src/test/scala/mesosphere/mesos/ResourceMatcherTest.scala
@@ -72,8 +72,9 @@ class ResourceMatcherTest extends MarathonSpec with Matchers {
 
   test("match resources success with preserved reservations") {
     // have unique reservation to make sure that the reservations are really preserved
-    val cpuReservation = MarathonTestHelper
-      .reservation(principal = "cpuPrincipal", labels = Map("some" -> "label"))
+    val cpuReservation =
+      MarathonTestHelper.reservation(principal = "cpuPrincipal",
+                                     labels = Map("some" -> "label"))
     val cpuReservation2 =
       MarathonTestHelper.reservation(principal = "cpuPrincipal",
                                      labels = Map("some" -> "label2"))
@@ -150,8 +151,9 @@ class ResourceMatcherTest extends MarathonSpec with Matchers {
 
     res.portsMatch.hostPortsWithRole.toSet should be(
       Set(
-        PortsMatcher
-          .PortWithRole("*", 80, reservation = Some(portsReservation)))
+        PortsMatcher.PortWithRole("*",
+                                  80,
+                                  reservation = Some(portsReservation)))
     )
 
     // reserved resources should not be matched by selector with reserved = false
@@ -163,8 +165,9 @@ class ResourceMatcherTest extends MarathonSpec with Matchers {
   }
 
   test("match resources should not consider resources with disk infos") {
-    val cpuReservation = MarathonTestHelper
-      .reservation(principal = "cpuPrincipal", labels = Map("some" -> "label"))
+    val cpuReservation =
+      MarathonTestHelper.reservation(principal = "cpuPrincipal",
+                                     labels = Map("some" -> "label"))
     val memReservation =
       MarathonTestHelper.reservation(principal = "memPrincipal",
                                      labels = Map("resource" -> "mem"))
diff --git a/repos/pickling/core/src/main/scala/scala/pickling/generator/WillRobinsonPickling.scala b/repos/pickling/core/src/main/scala/scala/pickling/generator/WillRobinsonPickling.scala
index 6a1a650e538..18f8f8c6c27 100644
--- a/repos/pickling/core/src/main/scala/scala/pickling/generator/WillRobinsonPickling.scala
+++ b/repos/pickling/core/src/main/scala/scala/pickling/generator/WillRobinsonPickling.scala
@@ -44,10 +44,14 @@ private[pickling] object WillRobinsonPickling extends PicklingAlgorithm {
 
           Seq(AllocateInstance(tpe)) ++ fields.map(f => f.setter).toSeq)
       val pickleBasic = PickleEntry((fields.map(f => f.getter)))
-      val pickle = SubclassDispatch
-        .apply(Nil, tpe, Some(pickleBasic), lookupRuntime = true)
-      val unpickle = SubclassUnpicklerDelegation
-        .apply(Nil, tpe, Some(unpickleBasic), lookupRuntime = true)
+      val pickle = SubclassDispatch.apply(Nil,
+                                          tpe,
+                                          Some(pickleBasic),
+                                          lookupRuntime = true)
+      val unpickle = SubclassUnpicklerDelegation.apply(Nil,
+                                                       tpe,
+                                                       Some(unpickleBasic),
+                                                       lookupRuntime = true)
       AlgorithmSucccess(PickleUnpickleImplementation(pickle, unpickle))
       // We special case AnyRef to be PURE reflection-based pickling.
     } else if ((tpe.className == "java.lang.Object") ||
diff --git a/repos/pickling/core/src/test/scala/pickling/run/externalizable.scala b/repos/pickling/core/src/test/scala/pickling/run/externalizable.scala
index b3794010c26..f831f40e34f 100644
--- a/repos/pickling/core/src/test/scala/pickling/run/externalizable.scala
+++ b/repos/pickling/core/src/test/scala/pickling/run/externalizable.scala
@@ -43,8 +43,10 @@ class StorageLevel private (private var useDisk_ : Boolean,
   }
 
   override def toString: String =
-    "StorageLevel(%b, %b, %b, %d)"
-      .format(useDisk, useMemory, deserialized, replication)
+    "StorageLevel(%b, %b, %b, %d)".format(useDisk,
+                                          useMemory,
+                                          deserialized,
+                                          replication)
 
   def toInt: Int = {
     var ret = 0
diff --git a/repos/pickling/core/src/test/scala/pickling/run/wrapped-array.scala b/repos/pickling/core/src/test/scala/pickling/run/wrapped-array.scala
index c1dde07f56a..ccce2ed8a1a 100644
--- a/repos/pickling/core/src/test/scala/pickling/run/wrapped-array.scala
+++ b/repos/pickling/core/src/test/scala/pickling/run/wrapped-array.scala
@@ -32,8 +32,7 @@ class WrappedArrayTest extends FunSuite {
     // TODO: allow passing in ClassLoader to picklers selected from registry
     val classLoader: ClassLoader = elemClass.getClassLoader
     val elemTag =
-      FastTypeTag
-        .mkRaw(elemClass, mirror) // slow: `mkRaw` is called for each element
+      FastTypeTag.mkRaw(elemClass, mirror) // slow: `mkRaw` is called for each element
     val pickler = internal.currentRuntime.picklers
       .genPickler(classLoader, elemClass, elemTag)
       .asInstanceOf[Pickler[AnyRef]]
diff --git a/repos/platform/accounts/src/main/scala/com/precog/accounts/AccountServiceHandlers.scala b/repos/platform/accounts/src/main/scala/com/precog/accounts/AccountServiceHandlers.scala
index 21150fd980e..ff894cbd1e4 100644
--- a/repos/platform/accounts/src/main/scala/com/precog/accounts/AccountServiceHandlers.scala
+++ b/repos/platform/accounts/src/main/scala/com/precog/accounts/AccountServiceHandlers.scala
@@ -608,8 +608,7 @@ class AccountServiceHandlers(
         futureContent flatMap { jvalue =>
           (jvalue \ "password").validated[String] match {
             case Success(newPassword) =>
-              accountManager
-                .updateAccountPassword(account, newPassword) map {
+              accountManager.updateAccountPassword(account, newPassword) map {
                 case true =>
                   logger.info(
                     "Password for account %s successfully updated by %s"
diff --git a/repos/platform/accounts/src/main/scala/com/precog/accounts/MongoAccountsServer.scala b/repos/platform/accounts/src/main/scala/com/precog/accounts/MongoAccountsServer.scala
index d5c019d27f4..d7d8bc308f6 100644
--- a/repos/platform/accounts/src/main/scala/com/precog/accounts/MongoAccountsServer.scala
+++ b/repos/platform/accounts/src/main/scala/com/precog/accounts/MongoAccountsServer.scala
@@ -88,11 +88,11 @@ object MongoAccountServer
 
   def Emailer(config: Configuration) = {
     val emailProps = new java.util.Properties
-    emailProps
-      .setProperty("mail.smtp.host", config[String]("host", "localhost"))
+    emailProps.setProperty("mail.smtp.host",
+                           config[String]("host", "localhost"))
     emailProps.setProperty("mail.smtp.port", config[String]("port", "25"))
-    emailProps
-      .setProperty("mail.from", config[String]("from", "support@precog.com"))
+    emailProps.setProperty("mail.from",
+                           config[String]("from", "support@precog.com"))
     val templateDir = new File(config[String]("template_dir"))
     require(
       templateDir.isDirectory,
diff --git a/repos/platform/auth/src/test/scala/com/precog/auth/MongoAPIKeyManagerSpec.scala b/repos/platform/auth/src/test/scala/com/precog/auth/MongoAPIKeyManagerSpec.scala
index 83e15918c32..c4a7f8f3b52 100644
--- a/repos/platform/auth/src/test/scala/com/precog/auth/MongoAPIKeyManagerSpec.scala
+++ b/repos/platform/auth/src/test/scala/com/precog/auth/MongoAPIKeyManagerSpec.scala
@@ -216,8 +216,10 @@ class MongoAPIKeyManagerSpec
       apiKeyManager.createAPIKey(Some("child2"), None, rootAPIKey, Set.empty),
       to)
     val grantChild1 = Await.result(
-      apiKeyManager
-        .createAPIKey(Some("grantChild1"), None, child1.apiKey, Set.empty),
+      apiKeyManager.createAPIKey(Some("grantChild1"),
+                                 None,
+                                 child1.apiKey,
+                                 Set.empty),
       to)
 
     // wait until the keys appear in the DB (some delay between insert request and actor insert)
diff --git a/repos/platform/auth/src/test/scala/com/precog/auth/SecurityServiceSpec.scala b/repos/platform/auth/src/test/scala/com/precog/auth/SecurityServiceSpec.scala
index f570e240c6b..95157761975 100644
--- a/repos/platform/auth/src/test/scala/com/precog/auth/SecurityServiceSpec.scala
+++ b/repos/platform/auth/src/test/scala/com/precog/auth/SecurityServiceSpec.scala
@@ -237,14 +237,16 @@ class SecurityServiceSpec
                                None),
     to)
 
-  val user5 = Await.result(
-    apiKeyManager
-      .createAPIKey(Some("user5-key"), None, user1.apiKey, Set.empty),
-    to)
-  val user6 = Await.result(
-    apiKeyManager
-      .createAPIKey(Some("user6-key"), None, user1.apiKey, Set.empty),
-    to)
+  val user5 = Await.result(apiKeyManager.createAPIKey(Some("user5-key"),
+                                                      None,
+                                                      user1.apiKey,
+                                                      Set.empty),
+                           to)
+  val user6 = Await.result(apiKeyManager.createAPIKey(Some("user6-key"),
+                                                      None,
+                                                      user1.apiKey,
+                                                      Set.empty),
+                           to)
 
   val expiredGrant = Await.result(
     apiKeyManager.createGrant(None,
@@ -255,8 +257,10 @@ class SecurityServiceSpec
                               Some(new DateTime().minusYears(1000))),
     to)
   val expired = Await.result(
-    apiKeyManager
-      .createAPIKey(None, None, user1.apiKey, Set(expiredGrant.grantId)),
+    apiKeyManager.createAPIKey(None,
+                               None,
+                               user1.apiKey,
+                               Set(expiredGrant.grantId)),
     to)
 
   val allAPIKeys = Await.result(apiKeyManager.listAPIKeys(), to)
diff --git a/repos/platform/bifrost/src/main/scala/com/precog/bifrost/ManagedQueryModule.scala b/repos/platform/bifrost/src/main/scala/com/precog/bifrost/ManagedQueryModule.scala
index da949025aa2..5c6ca2ce11c 100644
--- a/repos/platform/bifrost/src/main/scala/com/precog/bifrost/ManagedQueryModule.scala
+++ b/repos/platform/bifrost/src/main/scala/com/precog/bifrost/ManagedQueryModule.scala
@@ -180,8 +180,9 @@ trait ManagedQueryModule extends YggConfigComponent with Logging {
           value
         case Cancelled =>
           M.jobId map
-            (jobManager
-              .abort(_, "Query was cancelled.", yggConfig.clock.now()))
+            (jobManager.abort(_,
+                              "Query was cancelled.",
+                              yggConfig.clock.now()))
           throw QueryCancelledException(
             "Query was cancelled before it was completed.")
         case Expired =>
diff --git a/repos/platform/bifrost/src/main/scala/com/precog/bifrost/nihdb/NIHDBQueryExecutor.scala b/repos/platform/bifrost/src/main/scala/com/precog/bifrost/nihdb/NIHDBQueryExecutor.scala
index 38b6b3e4601..c48dce81cc0 100644
--- a/repos/platform/bifrost/src/main/scala/com/precog/bifrost/nihdb/NIHDBQueryExecutor.scala
+++ b/repos/platform/bifrost/src/main/scala/com/precog/bifrost/nihdb/NIHDBQueryExecutor.scala
@@ -263,8 +263,9 @@ trait NIHDBQueryExecutorComponent {
             ingestSystem
               .map(_.stoppable)
              .getOrElse(Stoppable.fromFuture(Future(()))))
-        _ <- IngestSystem
-          .actorStop(yggConfig, projectionsActor, "projections")
+        _ <- IngestSystem.actorStop(yggConfig,
+                                    projectionsActor,
+                                    "projections")
         _ <- IngestSystem.actorStop(yggConfig, masterChef, "masterChef")
         _ <- Stoppable.stop(scheduleStorageStoppable)
         _ <- chefs
diff --git a/repos/platform/bifrost/src/main/scala/com/precog/bifrost/service/ShardServiceCombinators.scala b/repos/platform/bifrost/src/main/scala/com/precog/bifrost/service/ShardServiceCombinators.scala
index ba0d1bae37a..5f042ecbbf3 100644
--- a/repos/platform/bifrost/src/main/scala/com/precog/bifrost/service/ShardServiceCombinators.scala
+++ b/repos/platform/bifrost/src/main/scala/com/precog/bifrost/service/ShardServiceCombinators.scala
@@ -124,8 +124,8 @@ object ShardServiceCombinators extends Logging {
     import blueeyes.json.serialization.Extractor._
 
     val onError: Error => String = {
       case err @ Thrown(ex) =>
-        logger
-          .warn("Exceptiion thrown from JSON parsing of sortOn parameter", ex)
+        logger.warn("Exceptiion thrown from JSON parsing of sortOn parameter",
+                    ex)
         err.message
       case other =>
         other.message
diff --git a/repos/platform/bifrost/src/test/scala/com/precog/bifrost/ManagedQueryExecutorSpec.scala b/repos/platform/bifrost/src/test/scala/com/precog/bifrost/ManagedQueryExecutorSpec.scala
index cd6cdeb3a67..fad0f1396a4 100644
--- a/repos/platform/bifrost/src/test/scala/com/precog/bifrost/ManagedQueryExecutorSpec.scala
+++ b/repos/platform/bifrost/src/test/scala/com/precog/bifrost/ManagedQueryExecutorSpec.scala
@@ -94,8 +94,9 @@ class ManagedQueryExecutorSpec extends TestManagedPlatform with Specification {
                          Path("/\\\\/\\///\\/"),
                          Path.Root,
                          clock.now())
-      result <- executor
-        .execute(numTicks.toString, ctx, QueryOptions(timeout = timeout))
+      result <- executor.execute(numTicks.toString,
+                                 ctx,
+                                 QueryOptions(timeout = timeout))
     } yield result
 
     executionResult.valueOr(err => sys.error(err.toString))
diff --git a/repos/platform/bytecode/src/test/scala/com/precog/bytecode/StaticLibrary.scala b/repos/platform/bytecode/src/test/scala/com/precog/bytecode/StaticLibrary.scala
index c7bd70b3532..3e35fa44b29 100644
--- a/repos/platform/bytecode/src/test/scala/com/precog/bytecode/StaticLibrary.scala
+++ b/repos/platform/bytecode/src/test/scala/com/precog/bytecode/StaticLibrary.scala
@@ -99,8 +99,8 @@ trait StaticLibrary extends Library {
     }
 
     object M1Product extends Morphism1(Vector(), "bin8", 0x0339) {
-      override val idPolicy = IdentityPolicy
-        .Product(IdentityPolicy.Synthesize, IdentityPolicy.Retain.Merge)
+      override val idPolicy = IdentityPolicy.Product(IdentityPolicy.Synthesize,
+                                                     IdentityPolicy.Retain.Merge)
     }
 
     object M2RetainMerge extends Morphism2(Vector(), "bin9", 0x1000) {
@@ -128,8 +128,8 @@ trait StaticLibrary extends Library {
     }
 
     object M2Product extends Morphism2(Vector(), "bin12", 0x1339) {
-      override val idPolicy = IdentityPolicy
-        .Product(IdentityPolicy.Synthesize, IdentityPolicy.Retain.Merge)
+      override val idPolicy = IdentityPolicy.Product(IdentityPolicy.Synthesize,
+                                                     IdentityPolicy.Retain.Merge)
     }
 
     object M11 extends Morphism1(Vector("std", "random"), "foobar", 0x0006) {
diff --git a/repos/platform/common/src/main/scala/com/precog/common/ingest/IngestMessage.scala b/repos/platform/common/src/main/scala/com/precog/common/ingest/IngestMessage.scala
index eed8e3489fc..f0df8671f7d 100644
--- a/repos/platform/common/src/main/scala/com/precog/common/ingest/IngestMessage.scala
+++ b/repos/platform/common/src/main/scala/com/precog/common/ingest/IngestMessage.scala
@@ -136,8 +136,13 @@ case class IngestMessage(apiKey: APIKey,
   }
 
   override def toString =
-    "IngestMessage(%s, %s, %s, (%d records), %s, %s, %s)"
-      .format(apiKey, path, writeAs, data.size, jobId, timestamp, streamRef)
+    "IngestMessage(%s, %s, %s, (%d records), %s, %s, %s)".format(apiKey,
+                                                                 path,
+                                                                 writeAs,
+                                                                 data.size,
+                                                                 jobId,
+                                                                 timestamp,
+                                                                 streamRef)
 }
 
 object IngestMessage {
diff --git a/repos/platform/common/src/main/scala/com/precog/common/security/APIKeyFinder.scala b/repos/platform/common/src/main/scala/com/precog/common/security/APIKeyFinder.scala
index 6e2f70eaba6..bfb3286b526 100644
--- a/repos/platform/common/src/main/scala/com/precog/common/security/APIKeyFinder.scala
+++ b/repos/platform/common/src/main/scala/com/precog/common/security/APIKeyFinder.scala
@@ -135,8 +135,7 @@ class DirectAPIKeyFinder[M[+ _]](underlying: APIKeyManager[M])(
   def createAPIKey(accountId: AccountId,
                    keyName: Option[String] = None,
                    keyDesc: Option[String] = None): M[v1.APIKeyDetails] = {
-    underlying
-      .newStandardAPIKeyRecord(accountId, keyName, keyDesc) flatMap recordDetails
+    underlying.newStandardAPIKeyRecord(accountId, keyName, keyDesc) flatMap recordDetails
   }
 
   def addGrant(accountKey: APIKey, grantId: GrantId): M[Boolean] = {
diff --git a/repos/platform/common/src/test/scala/com/precog/common/security/APIKeyManagerSpec.scala b/repos/platform/common/src/test/scala/com/precog/common/security/APIKeyManagerSpec.scala
index 65ac12cede7..fa42f7d40b4 100644
--- a/repos/platform/common/src/test/scala/com/precog/common/security/APIKeyManagerSpec.scala
+++ b/repos/platform/common/src/test/scala/com/precog/common/security/APIKeyManagerSpec.scala
@@ -49,8 +49,10 @@ trait APIKeyManagerSpec[M[+ _]] extends Specification {
                                   Set(rootGrantId),
                                   perms,
                                   None)
-        record <- mgr
-          .newAPIKeyWithGrants(Some("test"), None, rootKey, Set(grantRequest))
+        record <- mgr.newAPIKeyWithGrants(Some("test"),
+                                          None,
+                                          rootKey,
+                                          Set(grantRequest))
         grants <- record.toList.flatMap(_.grants).map(mgr.findGrant).sequence
       } yield {
         (grants.flatten.flatMap(_.parentIds), rootGrantId)
diff --git a/repos/platform/common/src/test/scala/com/precog/common/util/ArbitraryEventMessage.scala b/repos/platform/common/src/test/scala/com/precog/common/util/ArbitraryEventMessage.scala
index a2e4f9f9caa..23d4285352a 100644
--- a/repos/platform/common/src/test/scala/com/precog/common/util/ArbitraryEventMessage.scala
+++ b/repos/platform/common/src/test/scala/com/precog/common/util/ArbitraryEventMessage.scala
@@ -36,8 +36,8 @@ import Arbitrary.arbitrary
 trait ArbitraryEventMessage extends ArbitraryJValue {
   def genStreamId: Gen[Option[UUID]] =
-    Gen
-      .oneOf(Gen.resultOf[Int, Option[UUID]](_ => Some(UUID.randomUUID)), None)
+    Gen.oneOf(Gen.resultOf[Int, Option[UUID]](_ => Some(UUID.randomUUID)),
+              None)
 
   def genContentJValue: Gen[JValue] =
     frequency(
diff --git a/repos/platform/dvergr/src/main/scala/com/precog/dvergr/JobServiceHandlers.scala b/repos/platform/dvergr/src/main/scala/com/precog/dvergr/JobServiceHandlers.scala
index c00d3b72f27..d2ad1401003 100644
--- a/repos/platform/dvergr/src/main/scala/com/precog/dvergr/JobServiceHandlers.scala
+++ b/repos/platform/dvergr/src/main/scala/com/precog/dvergr/JobServiceHandlers.scala
@@ -450,8 +450,8 @@ class PutJobStateHandler(jobs: JobManager[Future])(
       (obj \ "state") match {
         case JString("started") =>
           transition(obj) { (timestamp, _) =>
-            jobs
-              .start(jobId, timestamp) map (Validation.fromEither(_)) map
+            jobs.start(jobId, timestamp) map (Validation
+              .fromEither(_)) map
               (_ map (_.state))
           }
diff --git a/repos/platform/ingest/src/main/scala/com/precog/ingest/EventIdSequence.scala b/repos/platform/ingest/src/main/scala/com/precog/ingest/EventIdSequence.scala
index 241966d5a3b..36937ab5acf 100644
--- a/repos/platform/ingest/src/main/scala/com/precog/ingest/EventIdSequence.scala
+++ b/repos/platform/ingest/src/main/scala/com/precog/ingest/EventIdSequence.scala
@@ -63,12 +63,12 @@ class SystemEventIdSequence private (
   }
 
   def saveState(offset: Long) = {
-    state = coordination
-      .saveEventRelayState(agent, currentRelayState(offset)) match {
-      case Success(ers @ EventRelayState(_, _, _)) => InternalState(ers)
-      case Failure(e) =>
-        sys.error("Error trying to save relay agent state: " + e)
-    }
+    state =
+      coordination.saveEventRelayState(agent, currentRelayState(offset)) match {
+        case Success(ers @ EventRelayState(_, _, _)) => InternalState(ers)
+        case Failure(e) =>
+          sys.error("Error trying to save relay agent state: " + e)
+      }
     PrecogUnit
   }
diff --git a/repos/platform/ingest/src/main/scala/com/precog/ingest/kafka/KafkaRelayAgent.scala b/repos/platform/ingest/src/main/scala/com/precog/ingest/kafka/KafkaRelayAgent.scala
index c1257dbb3aa..3e8825465be 100644
--- a/repos/platform/ingest/src/main/scala/com/precog/ingest/kafka/KafkaRelayAgent.scala
+++ b/repos/platform/ingest/src/main/scala/com/precog/ingest/kafka/KafkaRelayAgent.scala
@@ -319,14 +319,16 @@ final class KafkaRelayAgent(
       case Ingest(apiKey, path, writeAs, _, _, timestamp, _) =>
         if (writeAs.isDefined) Promise.successful(writeAs)
         else
-          permissionsFinder
-            .inferWriteAuthorities(apiKey, path, Some(timestamp))
+          permissionsFinder.inferWriteAuthorities(apiKey,
+                                                  path,
+                                                  Some(timestamp))
 
       case StoreFile(apiKey, path, writeAs, _, _, timestamp, _) =>
         if (writeAs.isDefined) Promise successful writeAs
         else
-          permissionsFinder
-            .inferWriteAuthorities(apiKey, path, Some(timestamp))
+          permissionsFinder.inferWriteAuthorities(apiKey,
+                                                  path,
+                                                  Some(timestamp))
       case _ =>
         Promise.successful(None)
     }
diff --git a/repos/platform/ingest/src/main/scala/com/precog/ingest/service/IngestServiceHandler.scala b/repos/platform/ingest/src/main/scala/com/precog/ingest/service/IngestServiceHandler.scala
index 911cb0a6041..aeb0843b941 100644
--- a/repos/platform/ingest/src/main/scala/com/precog/ingest/service/IngestServiceHandler.scala
+++ b/repos/platform/ingest/src/main/scala/com/precog/ingest/service/IngestServiceHandler.scala
@@ -252,8 +252,10 @@ class IngestServiceHandler(val permissionsFinder: PermissionsFinder[Future],
               storeMode) flatMap {
       case NotIngested(reason) =>
         val message =
-          "Ingest to %s by %s failed with reason: %s "
-            .format(path, apiKey, reason)
+          "Ingest to %s by %s failed with reason: %s ".format(
+            path,
+            apiKey,
+            reason)
         logger.warn(message)
         notifyJob(durability,
                   JobManager.channels.Warning,
@@ -266,8 +268,10 @@ class IngestServiceHandler(val permissionsFinder: PermissionsFinder[Future],
 
       case StreamingResult(ingested, None) =>
         val message =
-          "Ingest to %s by %s succeeded (%d records)"
-            .format(path, apiKey, ingested)
+          "Ingest to %s by %s succeeded (%d records)".format(
+            path,
+            apiKey,
+            ingested)
         logger.info(message)
         notifyJob(durability,
                   JobManager.channels.Info,
@@ -312,8 +316,10 @@ class IngestServiceHandler(val permissionsFinder: PermissionsFinder[Future],
         )
 
         val message =
-          "Ingest to %s with %s succeeded. Result: %s"
-            .format(path, apiKey, responseContent.renderPretty)
+          "Ingest to %s with %s succeeded. Result: %s".format(
+            path,
+            apiKey,
+            responseContent.renderPretty)
         logger.info(message)
         notifyJob(durability,
                   JobManager.channels.Info,
diff --git a/repos/platform/ingest/src/test/scala/com/precog/ingest/util/DirectIngestBenchmark.scala b/repos/platform/ingest/src/test/scala/com/precog/ingest/util/DirectIngestBenchmark.scala
index 29696766335..eb420776d1e 100644
--- a/repos/platform/ingest/src/test/scala/com/precog/ingest/util/DirectIngestBenchmark.scala
+++ b/repos/platform/ingest/src/test/scala/com/precog/ingest/util/DirectIngestBenchmark.scala
@@ -85,8 +85,8 @@ object DirectKafkaProducer extends App {
   val config = new Properties()
   config.put("broker.list", "0:localhost:9092")
   config.put("enable.zookeeper", "false")
-  config
-    .put("serializer.class", "com.precog.ingest.kafka.KafkaIngestMessageCodec")
+  config.put("serializer.class",
+             "com.precog.ingest.kafka.KafkaIngestMessageCodec")
 
   val producer =
     new Producer[String, IngestMessage](new ProducerConfig(config))
diff --git a/repos/platform/miklagard/jdbc/src/test/scala/com/precog/yggdrasil/jdbc/JDBCPlatformSpecs.scala b/repos/platform/miklagard/jdbc/src/test/scala/com/precog/yggdrasil/jdbc/JDBCPlatformSpecs.scala
index b6849ca7200..62c44c91ba5 100644
--- a/repos/platform/miklagard/jdbc/src/test/scala/com/precog/yggdrasil/jdbc/JDBCPlatformSpecs.scala
+++ b/repos/platform/miklagard/jdbc/src/test/scala/com/precog/yggdrasil/jdbc/JDBCPlatformSpecs.scala
@@ -171,8 +171,10 @@ object JDBCPlatformSpecEngine extends Logging {
           val columns = properties.map(_._1).mkString(", ")
           val values = properties.map(_._2._2).mkString(", ")
 
-          val insert = "INSERT INTO %s (%s) VALUES (%s);"
-            .format(tableName, columns, values)
+          val insert =
+            "INSERT INTO %s (%s) VALUES (%s);".format(tableName,
+                                                      columns,
+                                                      values)
 
           logger.debug("Inserting with " + insert)
diff --git a/repos/platform/mimir/src/main/scala/com/precog/mimir/ArrayLib.scala b/repos/platform/mimir/src/main/scala/com/precog/mimir/ArrayLib.scala
index 173145fbacc..f91fe6b8bb1 100644
--- a/repos/platform/mimir/src/main/scala/com/precog/mimir/ArrayLib.scala
+++ b/repos/platform/mimir/src/main/scala/com/precog/mimir/ArrayLib.scala
@@ -37,8 +37,9 @@ trait ArrayLibModule[M[+ _]] extends ColumnarTableLibModule[M] {
 
      val tpe = UnaryOperationType(JArrayUnfixedT, JType.JUniverseT)
 
-      override val idPolicy = IdentityPolicy
-        .Product(IdentityPolicy.Retain.Merge, IdentityPolicy.Synthesize)
+      override val idPolicy = IdentityPolicy.Product(
+        IdentityPolicy.Retain.Merge,
+        IdentityPolicy.Synthesize)
 
       def apply(table: Table, ctx: MorphContext) = M point {
         val derefed =
diff --git a/repos/platform/mimir/src/main/scala/com/precog/mimir/Clustering.scala b/repos/platform/mimir/src/main/scala/com/precog/mimir/Clustering.scala
index 186961453d6..baca09e285a 100644
--- a/repos/platform/mimir/src/main/scala/com/precog/mimir/Clustering.scala
+++ b/repos/platform/mimir/src/main/scala/com/precog/mimir/Clustering.scala
@@ -79,8 +79,10 @@ trait KMediansCoreSetClustering {
       val coresets = tree map {
         case (_, coreset) =>
-          CoreSet
-            .fromWeightedPoints(coreset._1, coreset._2, k, epsilon / 6.0)
+          CoreSet.fromWeightedPoints(coreset._1,
+                                     coreset._2,
+                                     k,
+                                     epsilon / 6.0)
       }
 
       coresets.foldLeft((new Array[Array[Double]](0), new Array[Long](0))) {
diff --git a/repos/platform/mimir/src/main/scala/com/precog/mimir/Evaluator.scala b/repos/platform/mimir/src/main/scala/com/precog/mimir/Evaluator.scala
index c1203241d3c..afc2172d976 100644
--- a/repos/platform/mimir/src/main/scala/com/precog/mimir/Evaluator.scala
+++ b/repos/platform/mimir/src/main/scala/com/precog/mimir/Evaluator.scala
@@ -464,8 +464,7 @@ trait EvaluatorModule[M[+ _]]
     def identityJoinSpec(ids: Vector[Int]): TransSpec1 = {
       if (ids.isEmpty) {
-        trans
-          .ConstLiteral(CEmptyArray, SourceKey.Single) // join with undefined, probably
+        trans.ConstLiteral(CEmptyArray, SourceKey.Single) // join with undefined, probably
       } else {
         val components = for (i <- ids)
           yield
@@ -1069,11 +1068,12 @@ trait EvaluatorModule[M[+ _]]
             pair <- zip(leftSortedM, rightSortedM)
             (leftSorted, rightSorted) = pair
 
-            result = leftSorted
-              .cogroup(keyValueSpec, keyValueSpec, rightSorted)(
-                TransSpec1.Id,
-                TransSpec1.DeleteKeyValue,
-                TransSpec2.DeleteKeyValueLeft)
+            result = leftSorted.cogroup(keyValueSpec,
+                                        keyValueSpec,
+                                        rightSorted)(
+              TransSpec1.Id,
+              TransSpec1.DeleteKeyValue,
+              TransSpec2.DeleteKeyValueLeft)
           } yield {
             PendingTable(result,
                          graph,
diff --git a/repos/platform/mimir/src/test/scala/com/precog/mimir/ArrayLibSpecs.scala b/repos/platform/mimir/src/test/scala/com/precog/mimir/ArrayLibSpecs.scala
index 6e269c4a470..3d5201f38e3 100644
--- a/repos/platform/mimir/src/test/scala/com/precog/mimir/ArrayLibSpecs.scala
+++ b/repos/platform/mimir/src/test/scala/com/precog/mimir/ArrayLibSpecs.scala
@@ -1,19 +1,19 @@
 /*
- *  ____    ____    _____    ____    ___     ____ 
+ *  ____    ____    _____    ____    ___     ____
  * |  _ \  |  _ \  | ____|  / ___|  / _/    / ___|        Precog (R)
 * |  |_) | |  |_) | |  _|  | |     | | /| | |  _          Advanced Analytics Engine for NoSQL Data
 * |  __/  |  _ <  | |___  | |___  |/ _| | | |_| |         Copyright (C) 2010 - 2013 SlamData, Inc.
 * |_|     |_|  \_\ |_____|  \____|  /__/   \____|         All Rights Reserved.
 *
- * This program is free software: you can redistribute it and/or modify it under the terms of the 
- * GNU Affero General Public License as published by the Free Software Foundation, either version 
+ * This program is free software: you can redistribute it and/or modify it under the terms of the
+ * GNU Affero General Public License as published by the Free Software Foundation, either version
 * 3 of the License, or (at your option) any later version.
 *
- * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; 
- * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See 
+ * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+ * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
 * the GNU Affero General Public License for more details.
 *
- * You should have received a copy of the GNU Affero General Public License along with this 
+ * You should have received a copy of the GNU Affero General Public License along with this
 * program. If not, see <http://www.gnu.org/licenses/>.
 *
 */
@@ -29,9 +29,9 @@ import scalaz.std.list._
 
 import com.precog.util.IdGen
 
 trait ArrayLibSpecs[M[+ _]]
-    extends Specification with EvaluatorTestSupport[M]
-    with LongIdMemoryDatasetConsumer[M] {
-  self =>
+    extends Specification
+    with EvaluatorTestSupport[M]
+    with LongIdMemoryDatasetConsumer[M] { self =>
 
   import Function._
@@ -52,8 +52,8 @@ trait ArrayLibSpecs[M[+ _]]
      val line = Line(1, 1, "")
 
      val input = dag.Morph1(
-        Flatten,
-        dag.AbsoluteLoad(Const(CString("/hom/arrays"))(line))(line))(line)
+          Flatten,
+          dag.AbsoluteLoad(Const(CString("/hom/arrays"))(line))(line))(line)
 
      val result = testEval(input)
      result must haveSize(25)
@@ -94,8 +94,8 @@ trait ArrayLibSpecs[M[+ _]]
      val line = Line(1, 1, "")
 
      val input = dag.Morph1(
-        Flatten,
-        dag.AbsoluteLoad(Const(CString("/het/arrays"))(line))(line))(line)
+          Flatten,
+          dag.AbsoluteLoad(Const(CString("/het/arrays"))(line))(line))(line)
 
      val result = testEval(input)
      result must haveSize(26)
@@ -130,11 +130,12 @@ trait ArrayLibSpecs[M[+ _]]
           SDecimal(244),
           SDecimal(13),
           SDecimal(11),
-          SArray(Vector(SDecimal(-9),
-                        SDecimal(-42),
-                        SDecimal(42),
-                        SDecimal(87),
-                        SDecimal(4))))
+          SArray(
+            Vector(SDecimal(-9),
+                   SDecimal(-42),
+                   SDecimal(42),
+                   SDecimal(87),
+                   SDecimal(4))))
     }
 
     "flattened set is related to original set" in {
@@ -142,21 +143,20 @@ trait ArrayLibSpecs[M[+ _]]
 
      val input = dag.Join(
-          JoinObject,
-          IdentitySort,
-          dag.Join(WrapObject,
-                   Cross(None),
-                   Const(CString("arr"))(line),
-                   dag.AbsoluteLoad(Const(CString("/het/arrays"))(line))(
-                     line))(line),
-          dag.Join(
-            WrapObject,
-            Cross(None),
-            Const(CString("val"))(line),
-            dag.Morph1(Flatten,
-                       dag.AbsoluteLoad(
-                         Const(CString("/het/arrays"))(line))(line))(line))(
-            line))(line)
+          JoinObject,
+          IdentitySort,
+          dag.Join(
+            WrapObject,
+            Cross(None),
+            Const(CString("arr"))(line),
+            dag.AbsoluteLoad(Const(CString("/het/arrays"))(line))(line))(line),
+          dag.Join(
+            WrapObject,
+            Cross(None),
+            Const(CString("val"))(line),
+            dag.Morph1(Flatten,
+                       dag.AbsoluteLoad(Const(CString("/het/arrays"))(
+                         line))(line))(line))(line))(line)
      val result = testEval(input)
 
      result must haveSize(26)
diff --git a/repos/platform/mirror/src/test/scala/com/precog/mirror/EvaluatorSpecs.scala b/repos/platform/mirror/src/test/scala/com/precog/mirror/EvaluatorSpecs.scala
index ca8ce048c17..091b5045687 100644
--- a/repos/platform/mirror/src/test/scala/com/precog/mirror/EvaluatorSpecs.scala
+++ b/repos/platform/mirror/src/test/scala/com/precog/mirror/EvaluatorSpecs.scala
@@ -299,8 +299,9 @@ object EvaluatorSpecs extends Specification with EvaluatorModule {
    def message(q: String): String = {
      val actual = doEval(q)
 
-      "evaluates to [%s], not [%s]"
-        .format(actual map { _.renderCompact } mkString ",", expect map {
+      "evaluates to [%s], not [%s]".format(
+        actual map { _.renderCompact } mkString ",",
+        expect map {
          _.renderCompact
        } mkString ",")
    }
diff --git a/repos/platform/niflheim/src/main/scala/com/precog/niflheim/Chef.scala b/repos/platform/niflheim/src/main/scala/com/precog/niflheim/Chef.scala
index 48561d80fa1..b5a0a40ab5f 100644
--- a/repos/platform/niflheim/src/main/scala/com/precog/niflheim/Chef.scala
+++ b/repos/platform/niflheim/src/main/scala/com/precog/niflheim/Chef.scala
@@ -74,8 +74,9 @@ final case class Chef(blockFormat: CookedBlockFormat, format: SegmentFormat)
    files flatMap { segs =>
      val metadata =
        CookedBlockMetadata(reader.id, reader.length, segs.toArray)
-      val mdFile = File
-        .createTempFile("block-%08x".format(reader.id), ".cookedmeta", root)
+      val mdFile =
+        File.createTempFile("block-%08x".format(reader.id),
+                            ".cookedmeta",
+                            root)
      val channel = new FileOutputStream(mdFile).getChannel()
      try {
        blockFormat.writeCookedBlock(channel, metadata).toValidationNel.map {
diff --git a/repos/platform/niflheim/src/main/scala/com/precog/niflheim/NIHDBActor.scala b/repos/platform/niflheim/src/main/scala/com/precog/niflheim/NIHDBActor.scala
index 03066607588..b2e36485c34 100644
--- a/repos/platform/niflheim/src/main/scala/com/precog/niflheim/NIHDBActor.scala
+++ b/repos/platform/niflheim/src/main/scala/com/precog/niflheim/NIHDBActor.scala
@@ -103,8 +103,7 @@ object NIHDB {
           timeout: Timeout,
           txLogScheduler: ScheduledExecutorService)(
      implicit actorSystem: ActorSystem) = {
-    NIHDBActor
-      .open(chef, baseDir, cookThreshold, timeout, txLogScheduler) map {
+    NIHDBActor.open(chef, baseDir, cookThreshold, timeout, txLogScheduler) map {
      _ map {
        _ map {
          case (authorities, actor) =>
diff --git a/repos/platform/niflheim/src/main/scala/com/precog/niflheim/Segment.scala b/repos/platform/niflheim/src/main/scala/com/precog/niflheim/Segment.scala
index 99dc4a8c4f7..0df698c232b 100644
--- a/repos/platform/niflheim/src/main/scala/com/precog/niflheim/Segment.scala
+++ b/repos/platform/niflheim/src/main/scala/com/precog/niflheim/Segment.scala
@@ -40,8 +40,11 @@ sealed trait Segment {
  def extend(amount: Int): Segment
 
  override def toString =
-    "Segment(%d, %s, %s, %d/%d)"
-      .format(blockid, cpath, ctype, defined.cardinality, length)
+    "Segment(%d, %s, %s, %d/%d)".format(blockid,
+                                        cpath,
+                                        ctype,
+                                        defined.cardinality,
+                                        length)
 }
 
 sealed trait ValueSegment[@spec(Boolean, Long, Double) A] extends Segment {
diff --git a/repos/platform/niflheim/src/main/scala/com/precog/niflheim/StorageReader.scala b/repos/platform/niflheim/src/main/scala/com/precog/niflheim/StorageReader.scala
index 6e9f07f904d..a0649650c61 100644
--- a/repos/platform/niflheim/src/main/scala/com/precog/niflheim/StorageReader.scala
+++ b/repos/platform/niflheim/src/main/scala/com/precog/niflheim/StorageReader.scala
@@ -37,6 +37,7 @@ trait StorageReader {
  def length: Int
 
  override def toString =
-    "StorageReader: id = %d, length = %d, structure = %s"
-      .format(id, length, structure)
+    "StorageReader: id = %d, length = %d, structure = %s".format(id,
+                                                                 length,
+                                                                 structure)
 }
diff --git a/repos/platform/quirrel/src/main/scala/com/precog/quirrel/Phases.scala b/repos/platform/quirrel/src/main/scala/com/precog/quirrel/Phases.scala
index b730bb7c2d6..321bdb3091e 100644
--- a/repos/platform/quirrel/src/main/scala/com/precog/quirrel/Phases.scala
+++ b/repos/platform/quirrel/src/main/scala/com/precog/quirrel/Phases.scala
@@ -40,8 +40,8 @@ trait Phases {
    val empty = Trace(Array.empty[(Sigma, Expr)], Array.empty[BitSet])
 
    def safeCopy(trace: Trace, node: (Sigma, Expr), indices: BitSet) =
-      trace
-        .copy(nodes = trace.nodes :+ node, indices = trace.indices :+ indices)
+      trace.copy(nodes = trace.nodes :+ node,
+                 indices = trace.indices :+ indices)
  }
 
  private val Phases: List[Phase] =
diff --git a/repos/platform/quirrel/src/main/scala/com/precog/quirrel/typer/Binder.scala b/repos/platform/quirrel/src/main/scala/com/precog/quirrel/typer/Binder.scala
index c1c057a990e..5333e0274fe 100644
--- a/repos/platform/quirrel/src/main/scala/com/precog/quirrel/typer/Binder.scala
+++ b/repos/platform/quirrel/src/main/scala/com/precog/quirrel/typer/Binder.scala
@@ -281,8 +281,7 @@ trait Binder extends parser.AST {
  case class ReductionBinding(red: Reduction) extends BuiltInBinding {
    val name = Identifier(red.namespace, red.name)
    override val toString =
-      ""
-        .format(red.name, 1) //assumes all reductions are arity 1
+      "".format(red.name, 1) //assumes all reductions are arity 1
  }
 
  case object DistinctBinding extends BuiltInBinding {
diff --git a/repos/platform/ragnarok/src/main/scala/com/precog/ragnarok/EvaluatingPerfTestRunner.scala b/repos/platform/ragnarok/src/main/scala/com/precog/ragnarok/EvaluatingPerfTestRunner.scala
index ffb6c9a4c26..68c75bf14ad 100644
--- a/repos/platform/ragnarok/src/main/scala/com/precog/ragnarok/EvaluatingPerfTestRunner.scala
+++ b/repos/platform/ragnarok/src/main/scala/com/precog/ragnarok/EvaluatingPerfTestRunner.scala
@@ -117,8 +117,9 @@ trait EvaluatingPerfTestRunner[M[+ _], T]
 
      case Right(dag) =>
        for {
-          table <- Evaluator(M)
-            .eval(dag, dummyEvaluationContext, yggConfig.optimize)
+          table <- Evaluator(M).eval(dag,
+                                     dummyEvaluationContext,
+                                     yggConfig.optimize)
          size <- Timing.timeM("Counting stream")(
            countStream(table.renderJson("", ",", "")))
        } yield size
diff --git a/repos/platform/util/src/main/scala/com/precog/util/MapUtils.scala b/repos/platform/util/src/main/scala/com/precog/util/MapUtils.scala
index 4a742c43a68..6e74aca33a8 100644
--- a/repos/platform/util/src/main/scala/com/precog/util/MapUtils.scala
+++ b/repos/platform/util/src/main/scala/com/precog/util/MapUtils.scala
@@ -47,8 +47,8 @@ class MapPimp[A, B, CC[B] <: GenTraversable[B]](left: GenMap[A, CC[B]]) {
      case (key, leftValues) => {
        right get key map { rightValues =>
          resultBuilder +=
-            (key -> Either3
-              .middle3[B, (CC[B], CC2[C]), C]((leftValues, rightValues)))
+            (key -> Either3.middle3[B, (CC[B], CC2[C]), C](
+              (leftValues, rightValues)))
        } getOrElse {
          leftValues foreach { b =>
            resultBuilder += (key -> Either3.left3[B, (CC[B], CC2[C]), C](b))
diff --git a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/actor/KafkaShardIngestActor.scala b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/actor/KafkaShardIngestActor.scala
index 26e5d42464f..9125eda7b43 100644
--- a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/actor/KafkaShardIngestActor.scala
+++ b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/actor/KafkaShardIngestActor.scala
@@ -480,8 +480,9 @@ abstract class KafkaShardIngestActor(
      val rawMessages = msTime({ t =>
        logger.debug(
-          "Kafka fetch from %s:%d in %d ms"
-            .format(topic, lastCheckpoint.offset, t))
+          "Kafka fetch from %s:%d in %d ms".format(topic,
+                                                   lastCheckpoint.offset,
+                                                   t))
      }) {
        consumer.fetch(req)
      }
@@ -528,8 +529,7 @@ abstract class KafkaShardIngestActor(
          case k @ (apiKey, path) =>
            // infer write authorities without a timestamp here, because we'll only use this for legacy events
            //val inferStart = System.currentTimeMillis
-            permissionsFinder
-              .inferWriteAuthorities(apiKey, path, None) map {
+            permissionsFinder.inferWriteAuthorities(apiKey, path, None) map {
              inferred =>
                //logger.trace("Write authorities inferred on %s in %d ms".format(k, System.currentTimeMillis - inferStart))
                k -> inferred
diff --git a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/jdbm3/JDBMRawSortProjection.scala b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/jdbm3/JDBMRawSortProjection.scala
index e2c109c4c4e..a994f3d568b 100644
--- a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/jdbm3/JDBMRawSortProjection.scala
+++ b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/jdbm3/JDBMRawSortProjection.scala
@@ -143,8 +143,10 @@ class JDBMRawSortProjection[M[+ _]] private[yggdrasil] (
    val valColumnDecoder =
      rowFormat.ColumnDecoder(valColumns.map(_._2)(collection.breakOut))
 
-    val (firstKey, lastKey, rows) = JDBMSlice
-      .load(sliceSize, iteratorSetup, keyColumnDecoder, valColumnDecoder)
+    val (firstKey, lastKey, rows) = JDBMSlice.load(sliceSize,
+                                                   iteratorSetup,
+                                                   keyColumnDecoder,
+                                                   valColumnDecoder)
 
    val slice = new Slice {
      val size = rows
diff --git a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/nihdb/NIHDBProjection.scala b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/nihdb/NIHDBProjection.scala
index 7f6ecb301e0..6840e0507aa 100644
--- a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/nihdb/NIHDBProjection.scala
+++ b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/nihdb/NIHDBProjection.scala
@@ -43,8 +43,9 @@ final class NIHDBProjection(snapshot: NIHDBSnapshot,
  val length = readers.map(_.length.toLong).sum
 
  override def toString =
-    "NIHDBProjection(id = %d, len = %d, authorities = %s)"
-      .format(projectionId, length, authorities)
+    "NIHDBProjection(id = %d, len = %d, authorities = %s)".format(projectionId,
+                                                                  length,
+                                                                  authorities)
 
  def structure(implicit M: Monad[Future]) =
    M.point(readers.flatMap(_.structure)(collection.breakOut): Set[ColumnRef])
diff --git a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/table/BlockStoreColumnarTableModule.scala b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/table/BlockStoreColumnarTableModule.scala
index 7d52020c876..d1c3211f19f 100644
--- a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/table/BlockStoreColumnarTableModule.scala
+++ b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/table/BlockStoreColumnarTableModule.scala
@@ -627,8 +627,9 @@ trait BlockStoreColumnarTableModule[M[+ _]]
                }
 
              case GT =>
-                val rightIdx = comparator.swap
-                  .nextLeftIndex(rightRow + 1, rhead.size - 1, 0)
+                val rightIdx = comparator.swap.nextLeftIndex(rightRow + 1,
+                                                             rhead.size - 1,
+                                                             0)
                //println("found next right index " + rightIdx + " from " + (rhead.size - 1, rhead.size, 0, rhead.size - rightRow - 1))
                if (rightIdx == rhead.size) {
                  MoreRight(NoSpan, leftRow, leq, req)
@@ -1399,8 +1400,9 @@ trait BlockStoreColumnarTableModule[M[+ _]]
                     joinSpec)
        }
      } else {
-        super
-          .join(left1, right1, orderHint)(leftKeySpec, rightKeySpec, joinSpec)
+        super.join(left1, right1, orderHint)(leftKeySpec,
+                                             rightKeySpec,
+                                             joinSpec)
      }
    }
diff --git a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/ActorVFS.scala b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/ActorVFS.scala
index e156a821b6b..0f8d43ce7d7 100644
--- a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/ActorVFS.scala
+++ b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/ActorVFS.scala
@@ -956,8 +956,8 @@ trait ActorVFSModule extends VFSModule[Future, Slice] {
      val io: IO[ReadResult] = version match {
        case Version.Current =>
          versionLog.current map { v =>
-            openResource(v.id)
-              .fold(PathOpFailure(path, _), ReadSuccess(path, _))
+            openResource(v.id).fold(PathOpFailure(path, _),
+                                    ReadSuccess(path, _))
          } getOrElse {
            IO(
              PathOpFailure(
diff --git a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/SecureVFS.scala b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/SecureVFS.scala
index 8bc656fb8a2..361068667e8 100644
--- a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/SecureVFS.scala
+++ b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/SecureVFS.scala
@@ -102,8 +102,9 @@ trait SecureVFSModule[M[+ _], Block] extends VFSModule[M, Block] {
          }
 
          EitherT {
-            permissionsFinder.apiKeyFinder
-              .hasCapability(apiKey, permissions, Some(clock.now())) map {
+            permissionsFinder.apiKeyFinder.hasCapability(apiKey,
+                                                         permissions,
+                                                         Some(clock.now())) map {
              case true => \/.right(resource)
              case false =>
                \/.left(
diff --git a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/VersionLog.scala b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/VersionLog.scala
index 6ae113aeadc..d569e1e50a1 100644
--- a/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/VersionLog.scala
+++ b/repos/platform/yggdrasil/src/main/scala/com/precog/yggdrasil/vfs/VersionLog.scala
@@ -181,8 +181,7 @@ class VersionLog(logFiles: VersionLog.LogFiles,
      IO(PrecogUnit)
    } getOrElse {
      logger.debug("Adding version entry: " + entry)
-      IOUtils
-        .writeToFile(entry.serialize.renderCompact + "\n", logFile, true) map {
+      IOUtils.writeToFile(entry.serialize.renderCompact + "\n", logFile, true) map {
        _ =>
          allVersions = allVersions :+ entry
          PrecogUnit
@@ -209,8 +208,7 @@ class VersionLog(logFiles: VersionLog.LogFiles,
    currentVersion.exists(_.id == newHead) unlessM {
      allVersions.find(_.id == newHead) traverse { entry =>
        logger.debug("Setting HEAD to " + newHead)
-        IOUtils
-          .writeToFile(entry.serialize.renderCompact + "\n", headFile) map {
+        IOUtils.writeToFile(entry.serialize.renderCompact + "\n", headFile) map {
          _ =>
            currentVersion = Some(entry);
        }
diff --git a/repos/platform/yggdrasil/src/test/scala/com/precog/yggdrasil/util/IdSourceScannerModuleSpec.scala b/repos/platform/yggdrasil/src/test/scala/com/precog/yggdrasil/util/IdSourceScannerModuleSpec.scala
index b0af389966a..9c42ceebedc 100644
--- a/repos/platform/yggdrasil/src/test/scala/com/precog/yggdrasil/util/IdSourceScannerModuleSpec.scala
+++ b/repos/platform/yggdrasil/src/test/scala/com/precog/yggdrasil/util/IdSourceScannerModuleSpec.scala
@@ -44,8 +44,10 @@ trait IdSourceScannerModuleSpec[M[+ _]]
    val (idCols, _) =
      (0 until n).foldLeft((List.empty[LongColumn], scanner.init)) {
        case ((idCols, acc0), i) =>
-          val (acc, replCols) = scanner
-            .scan(acc0, cols, (i * sliceSize) until ((i + 1) * sliceSize))
+          val (acc, replCols) =
+            scanner.scan(acc0,
+                         cols,
+                         (i * sliceSize) until ((i + 1) * sliceSize))
          val (col: LongColumn) :: Nil = replCols.values.toList
          (col :: idCols, acc)
      }
diff --git a/repos/playframework/documentation/manual/working/scalaGuide/advanced/routing/code/scalaguide/binder/models/AgeRange.scala b/repos/playframework/documentation/manual/working/scalaGuide/advanced/routing/code/scalaguide/binder/models/AgeRange.scala
index d228517eb47..57f0c6ca247 100644
--- a/repos/playframework/documentation/manual/working/scalaGuide/advanced/routing/code/scalaguide/binder/models/AgeRange.scala
+++ b/repos/playframework/documentation/manual/working/scalaGuide/advanced/routing/code/scalaguide/binder/models/AgeRange.scala
@@ -31,8 +31,9 @@ object AgeRange {
      }
    }
    override def unbind(key: String, ageRange: AgeRange): String = {
-      intBinder.unbind("from", ageRange.from) + "&" + intBinder
-        .unbind("to", ageRange.to)
+      intBinder.unbind("from", ageRange.from) + "&" + intBinder.unbind(
+        "to",
+        ageRange.to)
    }
  }
  //#bind
diff --git a/repos/playframework/documentation/manual/working/scalaGuide/main/akka/code/ScalaAkka.scala b/repos/playframework/documentation/manual/working/scalaGuide/main/akka/code/ScalaAkka.scala
index 5bf1843093c..aa81850ebf2 100644
--- a/repos/playframework/documentation/manual/working/scalaGuide/main/akka/code/ScalaAkka.scala
+++ b/repos/playframework/documentation/manual/working/scalaGuide/main/akka/code/ScalaAkka.scala
@@ -91,8 +91,10 @@ package scalaguide.akka {
        //#schedule-actor
        import scala.concurrent.duration._
 
-        val cancellable = system.scheduler
-          .schedule(0.microseconds, 300.microseconds, testActor, "tick")
+        val cancellable = system.scheduler.schedule(0.microseconds,
+                                                    300.microseconds,
+                                                    testActor,
+                                                    "tick")
        //#schedule-actor
        ok
      }
diff --git a/repos/playframework/documentation/manual/working/scalaGuide/main/async/code/ScalaWebSockets.scala b/repos/playframework/documentation/manual/working/scalaGuide/main/async/code/ScalaWebSockets.scala
index c0929ae817f..9c18aeba01d 100644
--- a/repos/playframework/documentation/manual/working/scalaGuide/main/async/code/ScalaWebSockets.scala
+++ b/repos/playframework/documentation/manual/working/scalaGuide/main/async/code/ScalaWebSockets.scala
@@ -29,15 +29,14 @@ object ScalaWebSockets extends PlaySpecification {
      val promise = Promise[List[Message]]()
      if (expectOut == 0) promise.success(Nil)
      val flowResult =
-        in via flow runWith Sink
-          .fold[(List[Message], Int), Message]((Nil, expectOut)) {
-            (state, out) =>
-              val (result, remaining) = state
-              if (remaining == 1) {
-                promise.success(result :+ out)
-              }
-              (result :+ out, remaining - 1)
+        in via flow runWith Sink.fold[(List[Message], Int), Message](
+          (Nil, expectOut)) { (state, out) =>
+          val (result, remaining) = state
+          if (remaining == 1) {
+            promise.success(result :+ out)
          }
+          (result :+ out, remaining - 1)
+        }
      import play.api.libs.iteratee.Execution.Implicits.trampoline
      await(
        Future.firstCompletedOf(Seq(promise.future, flowResult.map(_._1))))
diff --git a/repos/playframework/documentation/manual/working/scalaGuide/main/dependencyinjection/code/CompileTimeDependencyInjection.scala b/repos/playframework/documentation/manual/working/scalaGuide/main/dependencyinjection/code/CompileTimeDependencyInjection.scala
index f34fef715ff..a5e76ea33f7 100644
--- a/repos/playframework/documentation/manual/working/scalaGuide/main/dependencyinjection/code/CompileTimeDependencyInjection.scala
+++ b/repos/playframework/documentation/manual/working/scalaGuide/main/dependencyinjection/code/CompileTimeDependencyInjection.scala
@@ -18,11 +18,11 @@ object CompileTimeDependencyInjection extends Specification {
 
  "compile time dependency injection" should {
    "allow creating an application with the built in components from context" in {
-      val context = ApplicationLoader
-        .createContext(environment,
-                       Map(
-                         "play.application.loader" -> classOf[
-                           basic.MyApplicationLoader].getName))
+      val context =
+        ApplicationLoader.createContext(environment,
+                                        Map(
+                                          "play.application.loader" -> classOf[
+                                            basic.MyApplicationLoader].getName))
      val application = ApplicationLoader(context).load(context)
      application must beAnInstanceOf[Application]
      application.routes.documentation must beEmpty
diff --git a/repos/playframework/documentation/manual/working/scalaGuide/main/forms/code/ScalaForms.scala b/repos/playframework/documentation/manual/working/scalaGuide/main/forms/code/ScalaForms.scala
index 4de7d9d1cc4..05cc32464ac 100644
--- a/repos/playframework/documentation/manual/working/scalaGuide/main/forms/code/ScalaForms.scala
+++ b/repos/playframework/documentation/manual/working/scalaGuide/main/forms/code/ScalaForms.scala
@@ -112,8 +112,9 @@ package scalaguide.forms.scalaforms {
 
    "display global errors user template" in {
      val userForm = controllers.Application.userFormConstraintsAdHoc
-      implicit val request = FakeRequest()
-        .withFormUrlEncodedBody("name" -> "Johnny Utah", "age" -> "25")
+      implicit val request =
+        FakeRequest().withFormUrlEncodedBody("name" -> "Johnny Utah",
+                                             "age" -> "25")
 
      val boundForm = userForm.bindFromRequest
      boundForm.hasGlobalErrors must beTrue
diff --git a/repos/playframework/documentation/manual/working/scalaGuide/main/http/code/ScalaResults.scala b/repos/playframework/documentation/manual/working/scalaGuide/main/http/code/ScalaResults.scala
index dbcbf67ba4b..0d46fa85433 100644
--- a/repos/playframework/documentation/manual/working/scalaGuide/main/http/code/ScalaResults.scala
+++ b/repos/playframework/documentation/manual/working/scalaGuide/main/http/code/ScalaResults.scala
@@ -43,8 +43,9 @@ package scalaguide.http.scalaresults {
 
    "Manipulating HTTP headers" in {
      //#set-headers
-      val result = Ok("Hello World!")
-        .withHeaders(CACHE_CONTROL -> "max-age=3600", ETAG -> "xx")
+      val result =
+        Ok("Hello World!").withHeaders(CACHE_CONTROL -> "max-age=3600",
+                                       ETAG -> "xx")
      //#set-headers
      testHeader(result, CACHE_CONTROL, "max-age=3600")
      testHeader(result, ETAG, "xx")
diff --git a/repos/playframework/framework/project/Tasks.scala b/repos/playframework/framework/project/Tasks.scala
index 33a79009366..62ed67d93b3 100644
--- a/repos/playframework/framework/project/Tasks.scala
+++ b/repos/playframework/framework/project/Tasks.scala
@@ -66,7 +66,8 @@ object Commands {
            publishArtifact in GlobalScope := true
          ),
          structure)
-    Project
-      .setProject(session, newStructure, state.put(quickPublishToggle, toggle))
+    Project.setProject(session,
+                       newStructure,
+                       state.put(quickPublishToggle, toggle))
  }
 }
diff --git a/repos/playframework/framework/src/fork-run/src/main/scala/play/forkrun/ForkRun.scala b/repos/playframework/framework/src/fork-run/src/main/scala/play/forkrun/ForkRun.scala
index e8d04c8de36..18222b7061a 100644
--- a/repos/playframework/framework/src/fork-run/src/main/scala/play/forkrun/ForkRun.scala
+++ b/repos/playframework/framework/src/fork-run/src/main/scala/play/forkrun/ForkRun.scala
@@ -35,8 +35,9 @@ object ForkRun {
    val log = Logger(logLevel)
 
    val system = ActorSystem("play-fork-run", akkaNoLogging)
-    val sbt = system
-      .actorOf(SbtClient.props(new File(baseDirectory), log, logEvents), "sbt")
+    val sbt = system.actorOf(
+      SbtClient.props(new File(baseDirectory), log, logEvents),
+      "sbt")
    val forkRun =
      system.actorOf(props(sbt, configKey, runArgs, log), "fork-run")
@@ -147,8 +148,11 @@ object ForkRun {
                  defaultHttpAddress: String,
                  address: InetSocketAddress): String = {
    val devSettings: Seq[(String, String)] = Seq.empty
-    val (properties, httpPort, httpsPort, httpAddress) = Reloader
-      .filterArgs(args, defaultHttpPort, defaultHttpAddress, devSettings)
+    val (properties, httpPort, httpsPort, httpAddress) = Reloader.filterArgs(
+      args,
+      defaultHttpPort,
+      defaultHttpAddress,
+      devSettings)
    val host = if (httpAddress == "0.0.0.0") "localhost" else httpAddress
    if (httpPort.isDefined) s"http://$host:${httpPort.get}"
    else if (httpsPort.isDefined) s"https://$host:${httpsPort.get}"
diff --git a/repos/playframework/framework/src/fork-run/src/main/scala/play/forkrun/SbtClient.scala b/repos/playframework/framework/src/fork-run/src/main/scala/play/forkrun/SbtClient.scala
index 5ba1386acb9..d794c5ca89d 100644
--- a/repos/playframework/framework/src/fork-run/src/main/scala/play/forkrun/SbtClient.scala
+++ b/repos/playframework/framework/src/fork-run/src/main/scala/play/forkrun/SbtClient.scala
@@ -71,13 +71,16 @@ class SbtClient(baseDirectory: File, log: Logger, logEvents: Boolean)
  def active(client: ActorRef): Receive = {
    case Terminated(`client`) => shutdownWithClient(client)
    case Execute(input) =>
-      client ! SbtClientProxy.RequestExecution
-        .ByCommandOrTask(input, interaction = None, sendTo = self)
+      client ! SbtClientProxy.RequestExecution.ByCommandOrTask(input,
+                                                               interaction =
+                                                                 None,
+                                                               sendTo = self)
    case request @ Request(key, sendTo) =>
      val name = java.net.URLEncoder.encode(key, "utf-8")
      val task =
-        context.child(name) getOrElse context
-          .actorOf(SbtTask.props(key, client), name)
+        context.child(name) getOrElse context.actorOf(
+          SbtTask.props(key, client),
+          name)
      task ! request
    case Shutdown => shutdownWithClient(client)
  }
@@ -168,8 +171,10 @@ class SbtTask(name: String, client: ActorRef) extends Actor {
  def active(key: ScopedKey, requests: Seq[Request] = Seq.empty): Receive = {
    case request: Request =>
      if (requests.isEmpty)
-        client ! SbtClientProxy.RequestExecution
-          .ByScopedKey(key, interaction = None, sendTo = self)
+        client ! SbtClientProxy.RequestExecution.ByScopedKey(
+          key,
+          interaction = None,
+          sendTo = self)
      context become active(key, requests :+ request)
    case SbtClientProxy.ExecutionId(Success(tid), _) => // ignore
    case SbtClientProxy.ExecutionId(Failure(error), _) =>
diff --git a/repos/playframework/framework/src/iteratees/src/test/scala/play/api/libs/iteratee/ConcurrentSpec.scala b/repos/playframework/framework/src/iteratees/src/test/scala/play/api/libs/iteratee/ConcurrentSpec.scala
index 00c1efd6b87..9e773066f6c 100644
--- a/repos/playframework/framework/src/iteratees/src/test/scala/play/api/libs/iteratee/ConcurrentSpec.scala
+++ b/repos/playframework/framework/src/iteratees/src/test/scala/play/api/libs/iteratee/ConcurrentSpec.scala
@@ -91,8 +91,8 @@ object ConcurrentSpec
      // fastEnumerator can complete even though the slowIteratee
      // won't consume anything until it has finished.
       val result =
-        fastEnumerator &> Concurrent
-          .buffer(20, (_: Input[Int]) => 1)(bufferEC) |>>> slowIteratee
+        fastEnumerator &> Concurrent.buffer(20, (_: Input[Int]) => 1)(
+          bufferEC) |>>> slowIteratee
       await(result) must_== ((1 to 10).to[List])
     }
 
@@ -307,8 +307,7 @@
       val e = Concurrent.patchPanel[Int] { pp =>
         pp.patchIn(Enumerator.eof)
       }(ppEC)
-      Await
-        .result(e |>>> Iteratee.getChunks[Int], Duration.Inf) must equalTo(
+      Await.result(e |>>> Iteratee.getChunks[Int], Duration.Inf) must equalTo(
         Nil)
     }
   }
diff --git a/repos/playframework/framework/src/iteratees/src/test/scala/play/api/libs/iteratee/EnumerateesSpec.scala b/repos/playframework/framework/src/iteratees/src/test/scala/play/api/libs/iteratee/EnumerateesSpec.scala
index 92c03da03b8..40379a5f787 100644
--- a/repos/playframework/framework/src/iteratees/src/test/scala/play/api/libs/iteratee/EnumerateesSpec.scala
+++ b/repos/playframework/framework/src/iteratees/src/test/scala/play/api/libs/iteratee/EnumerateesSpec.scala
@@ -133,8 +133,7 @@ object EnumerateesSpec
         Enumeratee.dropWhile[String](_ != "4")(dropWhileEC) &>> Iteratee
           .consume[String]()
       val enumerator = Enumerator(Range(1, 20).map(_.toString): _*)
-      Await
-        .result(enumerator |>>> drop3AndConsume, Duration.Inf) must equalTo(
+      Await.result(enumerator |>>> drop3AndConsume, Duration.Inf) must equalTo(
         Range(4, 20).map(_.toString).mkString)
     }
   }
@@ -156,8 +155,7 @@ object EnumerateesSpec
         (Enumeratee.take[String](3) &>> Iteratee.consume()).flatMap(_ =>
           Iteratee.consume())(flatMapEC)
       val enumerator = Enumerator(Range(1, 20).map(_.toString): _*)
-      Await
-        .result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(
+      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(
         Range(4, 20).map(_.toString).mkString)
     }
   }
@@ -185,8 +183,7 @@ object EnumerateesSpec
         Enumeratee.takeWhile[String](_ != "4")(takeWhileEC) &>> Iteratee
           .consume()
       val enumerator = Enumerator(Range(1, 20).map(_.toString): _*)
-      Await
-        .result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(
+      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(
         List(1, 2, 3).map(_.toString).mkString)
     }
   }
@@ -197,8 +194,7 @@ object EnumerateesSpec
         (Enumeratee.takeWhile[String](_ != "4")(takeWhileEC) &>> Iteratee
           .consume()).flatMap(_ => Iteratee.consume())(consumeFlatMapEC)
       val enumerator = Enumerator(Range(1, 20).map(_.toString): _*)
-      Await
-        .result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(
+      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(
         Range(4, 20).map(_.toString).mkString)
     }
   }
@@ -261,8 +257,7 @@ object EnumerateesSpec
         (Traversable.take[String](3) &>> Iteratee.consume()).flatMap(_ =>
           Iteratee.consume())(consumeFlatMapEC)
       val enumerator = Enumerator("he", "ybbb", "bbb")
-      Await
-        .result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(
+      Await.result(enumerator |>>> take3AndConsume, Duration.Inf) must equalTo(
         "bbbbbb")
     }
   }
@@ -276,8 +271,7 @@ object EnumerateesSpec
         Enumeratee.map[Int](i => List(i + 1))(mapEC) &>> Iteratee
           .consume[List[Int]]()
       val enumerator = Enumerator(1, 2, 3, 4)
-      Await
-        .result(enumerator |>>> add1AndConsume, Duration.Inf) must equalTo(
+      Await.result(enumerator |>>> add1AndConsume, Duration.Inf) must equalTo(
         Seq(2, 3, 4, 5))
     }
   }
@@ -313,8 +307,7 @@ object EnumerateesSpec
       }
       val sum = Iteratee.fold[Int, Int](0)(_ + _)(sumEC)
       val enumerator = Enumerator(1, 2, 3, 4, 5, 6, 7, 8, 9)
-      Await
-        .result(enumerator |>>> passAlongFuture &>> sum, Duration.Inf) must equalTo(
+      Await.result(enumerator |>>> passAlongFuture &>> sum, Duration.Inf) must equalTo(
         45)
     }
   }
diff --git a/repos/playframework/framework/src/play-akka-http-server/src/main/scala/play/core/server/akkahttp/AkkaHttpServer.scala b/repos/playframework/framework/src/play-akka-http-server/src/main/scala/play/core/server/akkahttp/AkkaHttpServer.scala
index 905cc9d781d..fcb7f19f3d7 100644
--- a/repos/playframework/framework/src/play-akka-http-server/src/main/scala/play/core/server/akkahttp/AkkaHttpServer.scala
+++ b/repos/playframework/framework/src/play-akka-http-server/src/main/scala/play/core/server/akkahttp/AkkaHttpServer.scala
@@ -184,8 +184,9 @@ class AkkaHttpServer(config: ServerConfig,
       websocket(taggedRequestHeader).map {
         case Left(result) =>
-          modelConversion
-            .convertResult(taggedRequestHeader, result, request.protocol)
+          modelConversion.convertResult(taggedRequestHeader,
+                                        result,
+                                        request.protocol)
         case Right(flow) =>
           WebSocketHandler.handleWebSocket(upgrade, flow, 16384)
       }
@@ -240,8 +241,9 @@ class AkkaHttpServer(config: ServerConfig,
     val responseFuture: Future[HttpResponse] = resultFuture.map { result =>
       val cleanedResult: Result =
         ServerResultUtils.cleanFlashCookie(taggedRequestHeader, result)
-      modelConversion
-        .convertResult(taggedRequestHeader, cleanedResult, request.protocol)
+      modelConversion.convertResult(taggedRequestHeader,
+                                    cleanedResult,
+                                    request.protocol)
     }
     responseFuture
   }
diff --git a/repos/playframework/framework/src/play-docs/src/main/scala/play/docs/DocServerStart.scala b/repos/playframework/framework/src/play-docs/src/main/scala/play/docs/DocServerStart.scala
index a4f47417f73..c398ad71146 100644
--- a/repos/playframework/framework/src/play-docs/src/main/scala/play/docs/DocServerStart.scala
+++ b/repos/playframework/framework/src/play-docs/src/main/scala/play/docs/DocServerStart.scala
@@ -67,8 +67,9 @@ class DocServerStart {
       mode = Mode.Test,
       properties = System.getProperties
     )
-    val serverProvider: ServerProvider = ServerProvider
-      .fromConfiguration(getClass.getClassLoader, config.configuration)
+    val serverProvider: ServerProvider = ServerProvider.fromConfiguration(
+      getClass.getClassLoader,
+      config.configuration)
     val context = ServerProvider.Context(
       config,
       applicationProvider,
diff --git a/repos/playframework/framework/src/play-filters-helpers/src/main/scala/play/filters/csrf/csrf.scala b/repos/playframework/framework/src/play-filters-helpers/src/main/scala/play/filters/csrf/csrf.scala
index c0528dcbb93..00f7b33df39 100644
--- a/repos/playframework/framework/src/play-filters-helpers/src/main/scala/play/filters/csrf/csrf.scala
+++ b/repos/playframework/framework/src/play-filters-helpers/src/main/scala/play/filters/csrf/csrf.scala
@@ -285,8 +285,8 @@ object CSRF {
 class JavaCSRFErrorHandlerAdapter @Inject()(underlying: CSRFErrorHandler)
     extends ErrorHandler {
   def handle(request: RequestHeader, msg: String) =
-    JavaHelpers
-      .invokeWithContext(request, req => underlying.handle(req, msg))
+    JavaHelpers.invokeWithContext(request,
+                                  req => underlying.handle(req, msg))
 }
 
 class JavaCSRFErrorHandlerDelegate @Inject()(delegate: ErrorHandler)
diff --git a/repos/playframework/framework/src/play-filters-helpers/src/main/scala/play/filters/gzip/Gzip.scala b/repos/playframework/framework/src/play-filters-helpers/src/main/scala/play/filters/gzip/Gzip.scala
index 6cd2a5fdaff..5c009f62985 100644
--- a/repos/playframework/framework/src/play-filters-helpers/src/main/scala/play/filters/gzip/Gzip.scala
+++ b/repos/playframework/framework/src/play-filters-helpers/src/main/scala/play/filters/gzip/Gzip.scala
@@ -77,8 +77,9 @@ object Gzip {
         state: State,
         k: K[Bytes, A]): Iteratee[Bytes, Iteratee[Bytes, A]] = {
       // Deflate some bytes
-      val numBytes = state.deflater
-        .deflate(state.buffer, state.pos, bufferSize - state.pos)
+      val numBytes = state.deflater.deflate(state.buffer,
+                                            state.pos,
+                                            bufferSize - state.pos)
       if (numBytes == 0) {
         if (state.deflater.needsInput()) {
           // Deflater needs more input, so continue
@@ -105,8 +106,9 @@ object Gzip {
     def deflateUntilFinished[A](
         state: State,
         k: K[Bytes, A]): Iteratee[Bytes, Iteratee[Bytes, A]] = {
-      val numBytes = state.deflater
-        .deflate(state.buffer, state.pos, bufferSize - state.pos)
+      val numBytes = state.deflater.deflate(state.buffer,
+                                            state.pos,
+                                            bufferSize - state.pos)
       if (numBytes == 0) {
         if (state.deflater.finished()) {
           // Deflater is finished, send the trailer
@@ -254,8 +256,9 @@ object Gzip {
         k: K[Bytes, A],
         input: Bytes): Iteratee[Bytes, Iteratee[Bytes, A]] = {
       // Inflate some bytes
-      val numBytes = state.inflater
-        .inflate(state.buffer, state.pos, bufferSize - state.pos)
+      val numBytes = state.inflater.inflate(state.buffer,
+                                            state.pos,
+                                            bufferSize - state.pos)
       if (numBytes == 0) {
         if (state.inflater.finished()) {
           // Feed the current buffer
diff --git a/repos/playframework/framework/src/play-filters-helpers/src/test/scala/play/filters/gzip/GzipFilterSpec.scala b/repos/playframework/framework/src/play-filters-helpers/src/test/scala/play/filters/gzip/GzipFilterSpec.scala
index 6d34318c979..88332b54a79 100644
--- a/repos/playframework/framework/src/play-filters-helpers/src/test/scala/play/filters/gzip/GzipFilterSpec.scala
+++ b/repos/playframework/framework/src/play-filters-helpers/src/test/scala/play/filters/gzip/GzipFilterSpec.scala
@@ -89,8 +89,9 @@ object GzipFilterSpec extends PlaySpecification with DataTables {
     "not buffer more than the configured threshold" in withApplication(
       Ok.sendEntity(
-        HttpEntity
-          .Streamed(Source.single(ByteString(body)), Some(1000), None)),
+        HttpEntity.Streamed(Source.single(ByteString(body)),
+                            Some(1000),
+                            None)),
       chunkedThreshold = 512) { implicit mat =>
       val result = makeGzipRequest
       checkGzippedBody(result, body)
diff --git a/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/ServerIntegrationSpecification.scala b/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/ServerIntegrationSpecification.scala
index ed8c2a327ff..7bd0006ab3d 100644
--- a/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/ServerIntegrationSpecification.scala
+++ b/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/ServerIntegrationSpecification.scala
@@ -51,8 +51,10 @@ trait ServerIntegrationSpecification
   def TestServer(port: Int,
                  application: Application = play.api.PlayCoreTestApplication(),
                  sslPort: Option[Int] = None): play.api.test.TestServer = {
-    play.api.test
-      .TestServer(port, application, sslPort, Some(integrationServerProvider))
+    play.api.test.TestServer(port,
+                             application,
+                             sslPort,
+                             Some(integrationServerProvider))
   }
 
   /**
diff --git a/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/http/BasicHttpClient.scala b/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/http/BasicHttpClient.scala
index 9c75621e4f2..69797a8f4a9 100644
--- a/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/http/BasicHttpClient.scala
+++ b/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/http/BasicHttpClient.scala
@@ -32,8 +32,9 @@ object BasicHttpClient {
     var requestNo = 0
     val responses = requests.flatMap { request =>
       requestNo += 1
-      client
-        .sendRequest(request, requestNo.toString, trickleFeed = trickleFeed)
+      client.sendRequest(request,
+                         requestNo.toString,
+                         trickleFeed = trickleFeed)
     }
 
     if (checkClosed) {
@@ -62,8 +63,9 @@ object BasicHttpClient {
     var requestNo = 0
     requests.foreach { request =>
       requestNo += 1
-      client
-        .sendRequest(request, requestNo.toString, waitForResponses = false)
+      client.sendRequest(request,
+                         requestNo.toString,
+                         waitForResponses = false)
     }
     for (i <- 0 until requests.length) yield {
       client.readResponse(requestNo.toString)
diff --git a/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/http/websocket/WebSocketClient.scala b/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/http/websocket/WebSocketClient.scala
index b9b85b9caf8..ec1ad572f86 100644
--- a/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/http/websocket/WebSocketClient.scala
+++ b/repos/playframework/framework/src/play-integration-test/src/test/scala/play/it/http/websocket/WebSocketClient.scala
@@ -175,8 +175,10 @@ object WebSocketClient {
                            ctx.name,
                            "websocket-subscriber",
                            subscriber)
-      ctx.pipeline
-        .addAfter(ctx.executor, ctx.name, "websocket-publisher", publisher)
+      ctx.pipeline.addAfter(ctx.executor,
+                            ctx.name,
+                            "websocket-publisher",
+                            publisher)
 
       // Now remove ourselves from the chain
       ctx.pipeline.remove(ctx.name)
diff --git a/repos/playframework/framework/src/play-java-ws/src/test/scala/play/libs/oauth/OAuthSpec.scala b/repos/playframework/framework/src/play-java-ws/src/test/scala/play/libs/oauth/OAuthSpec.scala
index b153d6ec482..5cff9200cac 100644
--- a/repos/playframework/framework/src/play-java-ws/src/test/scala/play/libs/oauth/OAuthSpec.scala
+++ b/repos/playframework/framework/src/play-java-ws/src/test/scala/play/libs/oauth/OAuthSpec.scala
@@ -34,8 +34,11 @@ class OAuthSpec extends PlaySpecification {
       val (request, body, hostUrl) = receiveRequest { (client, hostUrl) =>
         client.url(hostUrl + "/foo").sign(oauthCalculator).get()
       }
-      OAuthRequestVerifier
-        .verifyRequest(request, body, hostUrl, consumerKey, requestToken)
+      OAuthRequestVerifier.verifyRequest(request,
+                                         body,
+                                         hostUrl,
+                                         consumerKey,
+                                         requestToken)
     }
 
     "sign a get request with query parameters" in {
@@ -46,8 +49,11 @@ class OAuthSpec extends PlaySpecification {
           .sign(oauthCalculator)
           .get()
       }
-      OAuthRequestVerifier
-        .verifyRequest(request, body, hostUrl, consumerKey, requestToken)
+      OAuthRequestVerifier.verifyRequest(request,
+                                         body,
+                                         hostUrl,
+                                         consumerKey,
+                                         requestToken)
     }
 
     "sign a post request with a body" in {
@@ -58,8 +64,11 @@ class OAuthSpec extends PlaySpecification {
           .setContentType("application/x-www-form-urlencoded")
          .post("param=paramValue")
       }
-      OAuthRequestVerifier
-        .verifyRequest(request, body, hostUrl, consumerKey, requestToken)
+      OAuthRequestVerifier.verifyRequest(request,
+                                         body,
+                                         hostUrl,
+                                         consumerKey,
+                                         requestToken)
     }
   }
diff --git a/repos/playframework/framework/src/play-java/src/main/scala/play/core/TemplateMagicForJava.scala b/repos/playframework/framework/src/play-java/src/main/scala/play/core/TemplateMagicForJava.scala
index 91a1199fd80..0dbc1536417 100644
--- a/repos/playframework/framework/src/play-java/src/main/scala/play/core/TemplateMagicForJava.scala
+++ b/repos/playframework/framework/src/play-java/src/main/scala/play/core/TemplateMagicForJava.scala
@@ -38,8 +38,9 @@ object PlayMagicForJava {
       },
       Option(jField.format).map(f => f._1 -> f._2.asScala),
       jField.errors.asScala.map { jE =>
-        play.api.data
-          .FormError(jE.key, jE.messages.asScala, jE.arguments.asScala)
+        play.api.data.FormError(jE.key,
+                                jE.messages.asScala,
+                                jE.arguments.asScala)
       },
       Option(jField.value)) {
diff --git a/repos/playframework/framework/src/play-jdbc-evolutions/src/main/scala/play/api/db/evolutions/Evolutions.scala b/repos/playframework/framework/src/play-jdbc-evolutions/src/main/scala/play/api/db/evolutions/Evolutions.scala
index 68040f0b241..f0f60ed279b 100644
--- a/repos/playframework/framework/src/play-jdbc-evolutions/src/main/scala/play/api/db/evolutions/Evolutions.scala
+++ b/repos/playframework/framework/src/play-jdbc-evolutions/src/main/scala/play/api/db/evolutions/Evolutions.scala
@@ -302,8 +302,9 @@ object OfflineEvolutions {
       autocommit: Boolean = true,
       schema: String = ""): Unit = {
     val evolutions = getEvolutions(appPath, classloader, dbApi)
-    val scripts = evolutions.evolutionsApi
-      .scripts(dbName, evolutions.evolutionsReader, schema)
+    val scripts = evolutions.evolutionsApi.scripts(dbName,
+                                                   evolutions.evolutionsReader,
+                                                   schema)
     if (!isTest) {
       logger.warn(
         "Applying evolution scripts for database '" + dbName +
diff --git a/repos/playframework/framework/src/play-jdbc-evolutions/src/test/scala/play/api/db/evolutions/EvolutionsSpec.scala b/repos/playframework/framework/src/play-jdbc-evolutions/src/test/scala/play/api/db/evolutions/EvolutionsSpec.scala
index 0076ab9f162..36c3a29e3bb 100644
--- a/repos/playframework/framework/src/play-jdbc-evolutions/src/test/scala/play/api/db/evolutions/EvolutionsSpec.scala
+++ b/repos/playframework/framework/src/play-jdbc-evolutions/src/test/scala/play/api/db/evolutions/EvolutionsSpec.scala
@@ -66,12 +66,12 @@ object EvolutionsSpec extends Specification {
       val broken = evolutions.scripts(Seq(c1, a2, a3))
       val fixed = evolutions.scripts(Seq(a1, a2, a3))
 
-      evolutions
-        .evolve(broken, autocommit = true) must throwAn[InconsistentDatabase]
+      evolutions.evolve(broken, autocommit = true) must throwAn[
+        InconsistentDatabase]
 
       // inconsistent until resolved
-      evolutions
-        .evolve(fixed, autocommit = true) must throwAn[InconsistentDatabase]
+      evolutions.evolve(fixed, autocommit = true) must throwAn[
+        InconsistentDatabase]
 
       evolutions.resolve(1)
diff --git a/repos/playframework/framework/src/play-jdbc/src/main/scala/play/api/db/DatabaseConfig.scala b/repos/playframework/framework/src/play-jdbc/src/main/scala/play/api/db/DatabaseConfig.scala
index 029ccd0c670..5c7081c0687 100644
--- a/repos/playframework/framework/src/play-jdbc/src/main/scala/play/api/db/DatabaseConfig.scala
+++ b/repos/playframework/framework/src/play-jdbc/src/main/scala/play/api/db/DatabaseConfig.scala
@@ -25,8 +25,9 @@ object DatabaseConfig {
 
   def fromConfig(config: PlayConfig, environment: Environment) = {
     val driver = config.get[Option[String]]("driver")
-    val (url, userPass) = ConnectionPool
-      .extractUrl(config.get[Option[String]]("url"), environment.mode)
+    val (url, userPass) = ConnectionPool.extractUrl(
+      config.get[Option[String]]("url"),
+      environment.mode)
     val username = config
       .getDeprecated[Option[String]]("username", "user")
       .orElse(userPass.map(_._1))
diff --git a/repos/playframework/framework/src/play-json/src/test/scala/play/api/libs/json/ReadsSpec.scala b/repos/playframework/framework/src/play-json/src/test/scala/play/api/libs/json/ReadsSpec.scala
index 6b02fce7da0..15358774df7 100644
--- a/repos/playframework/framework/src/play-json/src/test/scala/play/api/libs/json/ReadsSpec.scala
+++ b/repos/playframework/framework/src/play-json/src/test/scala/play/api/libs/json/ReadsSpec.scala
@@ -309,8 +309,9 @@ object ReadsSpec extends org.specs2.mutable.Specification {
     lazy val correctedReads =
       Reads.localDateReads(DateTimeFormatter.ISO_DATE, _.drop(1))
 
-    val CustomReads2 = Reads
-      .localDateReads(DateTimeFormatter.ofPattern("dd/MM/yyyy"), _.drop(2))
+    val CustomReads2 =
+      Reads.localDateReads(DateTimeFormatter.ofPattern("dd/MM/yyyy"),
+                           _.drop(2))
 
     "be successfully read from number" in {
       val beforeMidnight = Instant.parse("1970-01-01T23:55:00Z")
diff --git a/repos/playframework/framework/src/play-netty-server/src/main/scala/play/core/server/netty/NettyModelConversion.scala b/repos/playframework/framework/src/play-netty-server/src/main/scala/play/core/server/netty/NettyModelConversion.scala
index d8fb82b709f..bb960751e56 100644
--- a/repos/playframework/framework/src/play-netty-server/src/main/scala/play/core/server/netty/NettyModelConversion.scala
+++ b/repos/playframework/framework/src/play-netty-server/src/main/scala/play/core/server/netty/NettyModelConversion.scala
@@ -293,8 +293,9 @@ private[server] class NettyModelConversion(
       stream: Source[ByteString, _],
       httpVersion: HttpVersion,
       responseStatus: HttpResponseStatus)(implicit mat: Materializer) = {
-    val publisher = SynchronousMappedStreams
-      .map(stream.runWith(Sink.asPublisher(false)), byteStringToHttpContent)
+    val publisher = SynchronousMappedStreams.map(
+      stream.runWith(Sink.asPublisher(false)),
+      byteStringToHttpContent)
     new DefaultStreamedHttpResponse(httpVersion, responseStatus, publisher)
   }
diff --git a/repos/playframework/framework/src/play-netty-server/src/main/scala/play/core/server/netty/PlayRequestHandler.scala b/repos/playframework/framework/src/play-netty-server/src/main/scala/play/core/server/netty/PlayRequestHandler.scala
index ca9c628e6c4..ff16bb52194 100644
--- a/repos/playframework/framework/src/play-netty-server/src/main/scala/play/core/server/netty/PlayRequestHandler.scala
+++ b/repos/playframework/framework/src/play-netty-server/src/main/scala/play/core/server/netty/PlayRequestHandler.scala
@@ -301,8 +301,9 @@ private[play] class PlayRequestHandler(val server: NettyServer)
         ServerResultUtils.cleanFlashCookie(requestHeader, result)
       val validated =
         ServerResultUtils.validateResult(requestHeader, cleanedResult)
-      modelConversion
-        .convertResult(validated, requestHeader, request.getProtocolVersion)
+      modelConversion.convertResult(validated,
+                                    requestHeader,
+                                    request.getProtocolVersion)
     }
   }
diff --git a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/DevServerStart.scala b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/DevServerStart.scala
index 54685d6f295..5c797e3a731 100644
--- a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/DevServerStart.scala
+++ b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/DevServerStart.scala
@@ -260,8 +260,9 @@ object DevServerStart {
             Await.result(actorSystem.whenTerminated, Duration.Inf)
             Future.successful(())
           })
-        val serverProvider = ServerProvider
-          .fromConfiguration(classLoader, serverConfig.configuration)
+        val serverProvider =
+          ServerProvider.fromConfiguration(classLoader,
+                                           serverConfig.configuration)
         serverProvider.createServer(serverContext)
       } catch {
         case e: ExceptionInInitializerError => throw e.getCause
diff --git a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ProdServerStart.scala b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ProdServerStart.scala
index edc9a383a18..3d87ebe0944 100644
--- a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ProdServerStart.scala
+++ b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ProdServerStart.scala
@@ -50,8 +50,9 @@ object ProdServerStart {
       Play.start(application)
 
       // Start the server
-      val serverProvider: ServerProvider = ServerProvider
-        .fromConfiguration(process.classLoader, config.configuration)
+      val serverProvider: ServerProvider = ServerProvider.fromConfiguration(
+        process.classLoader,
+        config.configuration)
       val server = serverProvider.createServer(config, application)
       process.addShutdownHook {
         server.stop()
@@ -76,8 +77,10 @@ object ProdServerStart {
       val rootDirArg: Option[File] = process.args.headOption.map(new File(_))
       val rootDirConfig = rootDirArg.fold(Map.empty[String, String])(dir =>
         ServerConfig.rootDirConfig(dir))
-      Configuration
-        .load(process.classLoader, process.properties, rootDirConfig, true)
+      Configuration.load(process.classLoader,
+                         process.properties,
+                         rootDirConfig,
+                         true)
     }
 
     val rootDir: File = {
diff --git a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/CertificateGenerator.scala b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/CertificateGenerator.scala
index 27588013b37..e931f2a44dc 100644
--- a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/CertificateGenerator.scala
+++ b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/CertificateGenerator.scala
@@ -122,8 +122,8 @@ object CertificateGenerator {
     info.set(X509CertInfo.ISSUER,
              if (justName) owner else new CertificateIssuerName(owner))
     info.set(X509CertInfo.KEY, new CertificateX509Key(pair.getPublic))
-    info
-      .set(X509CertInfo.VERSION, new CertificateVersion(CertificateVersion.V3))
+    info.set(X509CertInfo.VERSION,
+             new CertificateVersion(CertificateVersion.V3))
 
     var algo: AlgorithmId = new AlgorithmId(oid)
diff --git a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/FakeKeyStore.scala b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/FakeKeyStore.scala
index ab1d06022ce..b5a995166c3 100644
--- a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/FakeKeyStore.scala
+++ b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/FakeKeyStore.scala
@@ -104,8 +104,8 @@ object FakeKeyStore {
     certInfo.set(
       X509CertInfo.SERIAL_NUMBER,
       new CertificateSerialNumber(new BigInteger(64, new SecureRandom())))
-    certInfo
-      .set(X509CertInfo.VERSION, new CertificateVersion(CertificateVersion.V3))
+    certInfo.set(X509CertInfo.VERSION,
+                 new CertificateVersion(CertificateVersion.V3))
 
     // Validity
     val validFrom = new Date()
@@ -127,8 +127,8 @@ object FakeKeyStore {
     // Key and algorithm
     certInfo.set(X509CertInfo.KEY, new CertificateX509Key(keyPair.getPublic))
     val algorithm = new AlgorithmId(SignatureAlgorithmOID)
-    certInfo
-      .set(X509CertInfo.ALGORITHM_ID, new CertificateAlgorithmId(algorithm))
+    certInfo.set(X509CertInfo.ALGORITHM_ID,
+                 new CertificateAlgorithmId(algorithm))
 
     // Create a new certificate and sign it
     val cert = new X509CertImpl(certInfo)
diff --git a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/ServerSSLEngine.scala b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/ServerSSLEngine.scala
index d3384a2f181..b5299e8d488 100644
--- a/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/ServerSSLEngine.scala
+++ b/repos/playframework/framework/src/play-server/src/main/scala/play/core/server/ssl/ServerSSLEngine.scala
@@ -129,8 +129,8 @@ object ServerSSLEngine {
     }
 
     if (serverConfigProviderArgsConstructor != null) {
-      serverConfigProviderArgsConstructor
-        .newInstance(serverConfig, applicationProvider)
+      serverConfigProviderArgsConstructor.newInstance(serverConfig,
+                                                      applicationProvider)
     } else if (providerArgsConstructor != null) {
       providerArgsConstructor.newInstance(applicationProvider)
     } else if (noArgsConstructor != null) {
diff --git a/repos/playframework/framework/src/play-streams/src/test/scala/play/api/libs/streams/impl/IterateeSubscriberSpec.scala b/repos/playframework/framework/src/play-streams/src/test/scala/play/api/libs/streams/impl/IterateeSubscriberSpec.scala
index 316c0430279..495fc3d8f9b 100644
--- a/repos/playframework/framework/src/play-streams/src/test/scala/play/api/libs/streams/impl/IterateeSubscriberSpec.scala
+++ b/repos/playframework/framework/src/play-streams/src/test/scala/play/api/libs/streams/impl/IterateeSubscriberSpec.scala
@@ -60,8 +60,7 @@ class IterateeSubscriberSpec extends Specification {
       val iter = Iteratee.getChunks[Int]
       val subr = new IterateeSubscriber(iter)
       pubr.subscribe(subr)
-      Await
-        .result(subr.result.unflatten, ScalaFiniteDuration(2, SECONDS)) must_==
+      Await.result(subr.result.unflatten, ScalaFiniteDuration(2, SECONDS)) must_==
         Done(List(1, 2, 3), Input.EOF)
     }
diff --git a/repos/playframework/framework/src/play-ws/src/main/scala/play/api/libs/ws/ssl/CompositeX509KeyManager.scala b/repos/playframework/framework/src/play-ws/src/main/scala/play/api/libs/ws/ssl/CompositeX509KeyManager.scala
index 3287d084216..2fe9fb59af0 100644
--- a/repos/playframework/framework/src/play-ws/src/main/scala/play/api/libs/ws/ssl/CompositeX509KeyManager.scala
+++ b/repos/playframework/framework/src/play-ws/src/main/scala/play/api/libs/ws/ssl/CompositeX509KeyManager.scala
@@ -73,8 +73,9 @@ class CompositeX509KeyManager(keyManagers: Seq[X509KeyManager])
     withKeyManagers { keyManager: X509KeyManager =>
       keyManager match {
         case extendedKeyManager: X509ExtendedKeyManager =>
-          val clientAlias = extendedKeyManager
-            .chooseEngineClientAlias(keyType, issuers, engine)
+          val clientAlias = extendedKeyManager.chooseEngineClientAlias(keyType,
+                                                                       issuers,
+                                                                       engine)
           if (clientAlias != null) {
             logger.debug(
               s"chooseEngineClientAlias: using clientAlias $clientAlias with keyManager $extendedKeyManager")
@@ -96,8 +97,9 @@ class CompositeX509KeyManager(keyManagers: Seq[X509KeyManager])
     withKeyManagers { keyManager: X509KeyManager =>
       keyManager match {
         case extendedKeyManager: X509ExtendedKeyManager =>
-          val clientAlias = extendedKeyManager
-            .chooseEngineServerAlias(keyType, issuers, engine)
+          val clientAlias = extendedKeyManager.chooseEngineServerAlias(keyType,
+                                                                       issuers,
+                                                                       engine)
           if (clientAlias != null) {
             logger.debug(
               s"chooseEngineServerAlias: using clientAlias $clientAlias with keyManager $extendedKeyManager")
diff --git a/repos/playframework/framework/src/play-ws/src/main/scala/play/api/libs/ws/ssl/SystemConfiguration.scala b/repos/playframework/framework/src/play-ws/src/main/scala/play/api/libs/ws/ssl/SystemConfiguration.scala
index a04fc3bfd26..65eee4a71b6 100644
--- a/repos/playframework/framework/src/play-ws/src/main/scala/play/api/libs/ws/ssl/SystemConfiguration.scala
+++ b/repos/playframework/framework/src/play-ws/src/main/scala/play/api/libs/ws/ssl/SystemConfiguration.scala
@@ -52,12 +52,12 @@ class SystemConfiguration {
     Security.setProperty("ocsp.enable", checkRevocation.toString)
     logger.debug("configureCheckRevocation: ocsp.enable = {}",
                  checkRevocation.toString)
-    System
-      .setProperty("com.sun.security.enableCRLDP", checkRevocation.toString)
+    System.setProperty("com.sun.security.enableCRLDP",
+                       checkRevocation.toString)
     logger.debug("configureCheckRevocation: com.sun.security.enableCRLDP = {}",
                  checkRevocation.toString)
-    System
-      .setProperty("com.sun.net.ssl.checkRevocation", checkRevocation.toString)
+    System.setProperty("com.sun.net.ssl.checkRevocation",
+                       checkRevocation.toString)
   }
 
   /**
diff --git a/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/oauth/OAuthSpec.scala b/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/oauth/OAuthSpec.scala
index adc76bd48f4..5f1cf2b4ded 100644
--- a/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/oauth/OAuthSpec.scala
+++ b/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/oauth/OAuthSpec.scala
@@ -29,8 +29,11 @@ class OAuthSpec extends PlaySpecification {
         implicit app => hostUrl =>
           WS.url(hostUrl + "/foo").sign(oauthCalculator).get()
       }
-      OAuthRequestVerifier
-        .verifyRequest(request, body, hostUrl, consumerKey, requestToken)
+      OAuthRequestVerifier.verifyRequest(request,
+                                         body,
+                                         hostUrl,
+                                         consumerKey,
+                                         requestToken)
     }
 
     "sign a get request with query parameters" in {
@@ -41,8 +44,11 @@ class OAuthSpec extends PlaySpecification {
           .sign(oauthCalculator)
          .get()
       }
-      OAuthRequestVerifier
-        .verifyRequest(request, body, hostUrl, consumerKey, requestToken)
+      OAuthRequestVerifier.verifyRequest(request,
+                                         body,
+                                         hostUrl,
+                                         consumerKey,
+                                         requestToken)
     }
 
     "sign a post request with a body" in {
@@ -52,8 +58,11 @@ class OAuthSpec extends PlaySpecification {
           .sign(oauthCalculator)
          .post(Map("param" -> Seq("paramValue")))
       }
-      OAuthRequestVerifier
-        .verifyRequest(request, body, hostUrl, consumerKey, requestToken)
+      OAuthRequestVerifier.verifyRequest(request,
+                                         body,
+                                         hostUrl,
+                                         consumerKey,
+                                         requestToken)
     }
   }
diff --git a/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/openid/OpenIDSpec.scala b/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/openid/OpenIDSpec.scala
index cc883e6268f..a8a427ed5c5 100644
--- a/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/openid/OpenIDSpec.scala
+++ b/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/openid/OpenIDSpec.scala
@@ -106,15 +106,15 @@ object OpenIDSpec extends Specification with Mockito {
     "generate a valid redirectUrl with a proper 'if_available' AND required extended attributes request" in {
       val ws = createMockWithValidOpDiscoveryAndVerification
       val openId = new WsOpenIdClient(ws, new WsDiscovery(ws))
-      val redirectUrl = Await
-        .result(openId.redirectURL(
-                  "http://example.com",
-                  "http://foo.bar.com/returnto",
-                  axRequired =
-                    Seq("first" -> "http://axschema.org/namePerson/first"),
-                  axOptional =
-                    Seq("email" -> "http://schema.openid.net/contact/email")),
-                dur)
+      val redirectUrl =
+        Await.result(openId.redirectURL(
+                       "http://example.com",
+                       "http://foo.bar.com/returnto",
+                       axRequired = Seq(
+                         "first" -> "http://axschema.org/namePerson/first"),
+                       axOptional = Seq(
+                         "email" -> "http://schema.openid.net/contact/email")),
+                     dur)
 
       val query = parseQueryString(redirectUrl)
 
@@ -233,8 +233,7 @@ object OpenIDSpec extends Specification with Mockito {
       val errorResponse =
         (openIdResponse - "openid.mode") + ("openid.mode" -> Seq("error"))
 
-      Await
-        .result(openId.verifiedId(setupMockRequest(errorResponse)), dur) must throwA[
+      Await.result(openId.verifiedId(setupMockRequest(errorResponse)), dur) must throwA[
         BAD_RESPONSE.type]
     }
diff --git a/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/ws/ssl/CompositeX509KeyManagerSpec.scala b/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/ws/ssl/CompositeX509KeyManagerSpec.scala
index 84f55772e78..5e623512d79 100644
--- a/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/ws/ssl/CompositeX509KeyManagerSpec.scala
+++ b/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/ws/ssl/CompositeX509KeyManagerSpec.scala
@@ -148,8 +148,7 @@ object CompositeX509KeyManagerSpec extends Specification with Mockito {
       val issuers = Array[Principal]()
       val socket = mock[Socket]
 
-      mockKeyManager
-        .chooseClientAlias(keyType, issuers, socket) returns "clientAlias"
+      mockKeyManager.chooseClientAlias(keyType, issuers, socket) returns "clientAlias"
 
       val serverAlias = keyManager.chooseClientAlias(keyType = keyType,
                                                      issuers = issuers,
@@ -240,8 +239,7 @@ object CompositeX509KeyManagerSpec extends Specification with Mockito {
       val issuers = Array[Principal]()
       val socket = mock[Socket]
 
-      mockKeyManager
-        .chooseServerAlias(keyType, issuers, socket) returns "serverAlias"
+      mockKeyManager.chooseServerAlias(keyType, issuers, socket) returns "serverAlias"
 
       val serverAlias = keyManager.chooseServerAlias(keyType = keyType,
                                                      issuers = issuers,
diff --git a/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/ws/ssl/CompositeX509TrustManagerSpec.scala b/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/ws/ssl/CompositeX509TrustManagerSpec.scala
index a25345fad2d..dc32575ba9a 100644
--- a/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/ws/ssl/CompositeX509TrustManagerSpec.scala
+++ b/repos/playframework/framework/src/play-ws/src/test/scala/play/api/libs/ws/ssl/CompositeX509TrustManagerSpec.scala
@@ -32,11 +32,9 @@ object CompositeX509TrustManagerSpec extends Specification with Mockito {
       val chain = Array[X509Certificate](certificate)
       val authType = ""
 
-      mockTrustManager1
-        .checkClientTrusted(chain, authType) throws new CertificateException(
+      mockTrustManager1.checkClientTrusted(chain, authType) throws new CertificateException(
         "fake1")
-      mockTrustManager2
-        .checkClientTrusted(chain, authType) throws new CertificateException(
+      mockTrustManager2.checkClientTrusted(chain, authType) throws new CertificateException(
         "fake2")
 
       trustManager
@@ -81,8 +79,7 @@ object CompositeX509TrustManagerSpec extends Specification with Mockito {
       val chain = Array[X509Certificate](certificate)
       val authType = ""
 
-      mockTrustManager1
-        .checkClientTrusted(chain, authType) throws new CertificateException(
+      mockTrustManager1.checkClientTrusted(chain, authType) throws new CertificateException(
         "fake1")
       mockTrustManager2.checkClientTrusted(chain, authType)
 
@@ -152,11 +149,9 @@ object CompositeX509TrustManagerSpec extends Specification with Mockito {
       val chain = Array[X509Certificate](certificate)
       val authType = ""
 
-      mockTrustManager1
-        .checkServerTrusted(chain, authType) throws new CertificateException(
+      mockTrustManager1.checkServerTrusted(chain, authType) throws new CertificateException(
         "fake1")
-      mockTrustManager2
-        .checkServerTrusted(chain, authType) throws new CertificateException(
+      mockTrustManager2.checkServerTrusted(chain, authType) throws new CertificateException(
         "fake2")
 
       trustManager
diff --git a/repos/playframework/framework/src/play/src/main/scala/play/api/Configuration.scala b/repos/playframework/framework/src/play/src/main/scala/play/api/Configuration.scala
index 33ffdabfa49..5ef5bfbe970 100644
--- a/repos/playframework/framework/src/play/src/main/scala/play/api/Configuration.scala
+++ b/repos/playframework/framework/src/play/src/main/scala/play/api/Configuration.scala
@@ -82,8 +82,9 @@ object Configuration {
     // Resolve another .conf file so that we can override values in Akka's
     // reference.conf, but still make it possible for users to override
     // Play's values in their application.conf.
-    val playOverridesConfig: Config = ConfigFactory
-      .parseResources(classLoader, "play/reference-overrides.conf")
+    val playOverridesConfig: Config = ConfigFactory.parseResources(
+      classLoader,
+      "play/reference-overrides.conf")
 
     // Resolve reference.conf ourselves because ConfigFactory.defaultReference resolves
     // values, and we won't have a value for `play.server.dir` until all our config is combined.
diff --git a/repos/playframework/framework/src/play/src/main/scala/play/api/GlobalSettings.scala b/repos/playframework/framework/src/play/src/main/scala/play/api/GlobalSettings.scala
index 8b3b68ae051..394d467216c 100644
--- a/repos/playframework/framework/src/play/src/main/scala/play/api/GlobalSettings.scala
+++ b/repos/playframework/framework/src/play/src/main/scala/play/api/GlobalSettings.scala
@@ -189,8 +189,9 @@ trait GlobalSettings {
    * @return the result to send to the client
    */
  def onBadRequest(request: RequestHeader, error: String): Future[Result] =
-    defaultErrorHandler
-      .onClientError(request, play.api.http.Status.BAD_REQUEST, error)
+    defaultErrorHandler.onClientError(request,
+                                      play.api.http.Status.BAD_REQUEST,
+                                      error)
 }
 
 /**
diff --git a/repos/playframework/framework/src/play/src/main/scala/play/api/Play.scala b/repos/playframework/framework/src/play/src/main/scala/play/api/Play.scala
index ed79eae46c3..e89372a371b 100644
--- a/repos/playframework/framework/src/play/src/main/scala/play/api/Play.scala
+++ b/repos/playframework/framework/src/play/src/main/scala/play/api/Play.scala
@@ -55,8 +55,8 @@ object Play {
   xercesSaxParserFactory.setFeature(Constants.XERCES_FEATURE_PREFIX +
                                       Constants.DISALLOW_DOCTYPE_DECL_FEATURE,
                                     true)
-  xercesSaxParserFactory
-    .setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true)
+  xercesSaxParserFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING,
+                                    true)
 
   /*
   * A parser to be used that is configured to ensure that no schemas are loaded.
diff --git a/repos/playframework/framework/src/play/src/main/scala/play/api/controllers/Assets.scala b/repos/playframework/framework/src/play/src/main/scala/play/api/controllers/Assets.scala
index 6e8e4639350..ba85abbaf73 100644
--- a/repos/playframework/framework/src/play/src/main/scala/play/api/controllers/Assets.scala
+++ b/repos/playframework/framework/src/play/src/main/scala/play/api/controllers/Assets.scala
@@ -151,8 +151,8 @@ private[controllers] object AssetInfo {
         }
       } catch {
         case e: IllegalArgumentException =>
-          Logger
-            .debug(s"An invalid date was received: couldn't parse: $date", e)
+          Logger.debug(s"An invalid date was received: couldn't parse: $date",
+                       e)
           None
       }
     } else {
diff --git a/repos/playframework/framework/src/play/src/main/scala/play/api/http/HttpErrorHandler.scala b/repos/playframework/framework/src/play/src/main/scala/play/api/http/HttpErrorHandler.scala
index 8e43f40e51e..a1a09dfa16e 100644
--- a/repos/playframework/framework/src/play/src/main/scala/play/api/http/HttpErrorHandler.scala
+++ b/repos/playframework/framework/src/play/src/main/scala/play/api/http/HttpErrorHandler.scala
@@ -201,8 +201,9 @@ class DefaultHttpErrorHandler(environment: Environment,
           case Mode.Prod =>
             views.html.defaultpages.notFound(request.method, request.uri)
           case _ =>
-            views.html.defaultpages
-              .devNotFound(request.method, request.uri, router)
+            views.html.defaultpages.devNotFound(request.method,
+                                                request.uri,
+                                                router)
         }))
   }
 
@@ -267,8 +268,9 @@ class DefaultHttpErrorHandler(environment: Environment,
     Logger.error("""
                    |
                    |! @%s - Internal server error, for (%s) [%s] ->
-                   | """.stripMargin
-                     .format(usefulException.id, request.method, request.uri),
+                   | """.stripMargin.format(usefulException.id,
+                                            request.method,
+                                            request.uri),
                  usefulException)
   }
diff --git a/repos/playframework/framework/src/play/src/main/scala/play/api/libs/Files.scala b/repos/playframework/framework/src/play/src/main/scala/play/api/libs/Files.scala
index d0404618915..fff85287136 100644
--- a/repos/playframework/framework/src/play/src/main/scala/play/api/libs/Files.scala
+++ b/repos/playframework/framework/src/play/src/main/scala/play/api/libs/Files.scala
@@ -97,8 +97,9 @@ object Files {
     def moveTo(to: File, replace: Boolean = false): File = {
       try {
         if (replace)
-          JFiles
-            .move(file.toPath, to.toPath, StandardCopyOption.REPLACE_EXISTING)
+          JFiles.move(file.toPath,
+                      to.toPath,
+                      StandardCopyOption.REPLACE_EXISTING)
         else JFiles.move(file.toPath, to.toPath)
       } catch {
         case ex: FileAlreadyExistsException => to
diff --git a/repos/playframework/framework/src/play/src/main/scala/play/core/formatters/Multipart.scala b/repos/playframework/framework/src/play/src/main/scala/play/core/formatters/Multipart.scala
index 1e0bd82b8d8..f3b1690665c 100644
--- a/repos/playframework/framework/src/play/src/main/scala/play/core/formatters/Multipart.scala
+++ b/repos/playframework/framework/src/play/src/main/scala/play/core/formatters/Multipart.scala
@@ -166,8 +166,10 @@ object Multipart {
     val (key, filename, contentType) = bodyPart match {
       case MultipartFormData.DataPart(innerKey, _) =>
         (innerKey, None, Option("text/plain"))
-      case MultipartFormData
-            .FilePart(innerKey, innerFilename, innerContentType, _) =>
+      case MultipartFormData.FilePart(innerKey,
+                                      innerFilename,
+                                      innerContentType,
+                                      _) =>
         (innerKey, Option(innerFilename), innerContentType)
       case _ => throw new UnsupportedOperationException()
     }
diff --git a/repos/playframework/framework/src/play/src/main/scala/play/core/parsers/Multipart.scala b/repos/playframework/framework/src/play/src/main/scala/play/core/parsers/Multipart.scala
index ea1a1edbfb9..3355dcc2d53 100644
--- a/repos/playframework/framework/src/play/src/main/scala/play/core/parsers/Multipart.scala
+++ b/repos/playframework/framework/src/play/src/main/scala/play/core/parsers/Multipart.scala
@@ -281,8 +281,11 @@ object Multipart {
       array(1) = '\n'.toByte
       array(2) = '-'.toByte
       array(3) = '-'.toByte
-      System
-        .arraycopy(boundary.getBytes("US-ASCII"), 0, array, 4, boundary.length)
+      System.arraycopy(boundary.getBytes("US-ASCII"),
+                       0,
+                       array,
+                       4,
+                       boundary.length)
       array
     }
diff --git a/repos/playframework/framework/src/play/src/main/scala/play/core/routing/GeneratedRouter.scala b/repos/playframework/framework/src/play/src/main/scala/play/core/routing/GeneratedRouter.scala
index 58e59e591b1..6157525b264 100644
--- a/repos/playframework/framework/src/play/src/main/scala/play/core/routing/GeneratedRouter.scala
+++ b/repos/playframework/framework/src/play/src/main/scala/play/core/routing/GeneratedRouter.scala
@@ -99,8 +99,9 @@ abstract class GeneratedRouter extends Router {
   def errorHandler: HttpErrorHandler
 
   def badRequest(error: String) = Action.async { request =>
-    errorHandler
-      .onClientError(request, play.api.http.Status.BAD_REQUEST, error)
+    errorHandler.onClientError(request,
+                               play.api.http.Status.BAD_REQUEST,
+                               error)
   }
 
   def call(generator: => Handler): Handler = {
diff --git a/repos/playframework/framework/src/play/src/test/scala/play/api/libs/crypto/AESCTRCrypterSpec.scala b/repos/playframework/framework/src/play/src/test/scala/play/api/libs/crypto/AESCTRCrypterSpec.scala
index afae27a6b7d..6de0b5e9506 100644
--- a/repos/playframework/framework/src/play/src/test/scala/play/api/libs/crypto/AESCTRCrypterSpec.scala
+++ b/repos/playframework/framework/src/play/src/test/scala/play/api/libs/crypto/AESCTRCrypterSpec.scala
@@ -20,14 +20,12 @@ class AESCTRCrypterSpec extends Specification {
 
     "be able to encrypt/decrypt text using AES algorithm" in {
       val text = "Play Framework 2.0"
-      crypter
-        .decryptAES(crypter.encryptAES(text, key), key) must be equalTo text
+      crypter.decryptAES(crypter.encryptAES(text, key), key) must be equalTo text
     }
 
     "be able to encrypt/decrypt text using other AES transformations" in {
       val text = "Play Framework 2.0"
-      crypter
-        .decryptAES(crypter.encryptAES(text, key), key) must be equalTo text
+      crypter.decryptAES(crypter.encryptAES(text, key), key) must be equalTo text
     }
 
     "be able to decrypt text generated using the old transformation methods" in {
diff --git a/repos/playframework/framework/src/play/src/test/scala/play/api/mvc/CookiesSpec.scala b/repos/playframework/framework/src/play/src/test/scala/play/api/mvc/CookiesSpec.scala
index 2d3779e8d51..0c623858410 100644
--- a/repos/playframework/framework/src/play/src/test/scala/play/api/mvc/CookiesSpec.scala
+++ b/repos/playframework/framework/src/play/src/test/scala/play/api/mvc/CookiesSpec.scala
@@ -111,8 +111,9 @@ object CookiesSpec extends Specification {
 
   "merging cookies" should {
     "replace old cookies with new cookies of the same name" in {
-      val originalRequest = FakeRequest()
-        .withCookies(Cookie("foo", "fooValue1"), Cookie("bar", "barValue2"))
+      val originalRequest =
+        FakeRequest().withCookies(Cookie("foo", "fooValue1"),
+                                  Cookie("bar", "barValue2"))
       val requestWithMoreCookies =
         originalRequest.withCookies(Cookie("foo", "fooValue2"),
                                     Cookie("baz", "bazValue"))
diff --git a/repos/playframework/framework/src/routes-compiler/src/main/scala/play/routes/compiler/templates/package.scala b/repos/playframework/framework/src/routes-compiler/src/main/scala/play/routes/compiler/templates/package.scala
index e6268fae4c3..1208fe59a4f 100644
--- a/repos/playframework/framework/src/routes-compiler/src/main/scala/play/routes/compiler/templates/package.scala
+++ b/repos/playframework/framework/src/routes-compiler/src/main/scala/play/routes/compiler/templates/package.scala
@@ -492,8 +492,9 @@ package object templates {
       }.mkString(", "))
     }
 
-    "return _wA({method:\"%s\", url:%s%s})"
-      .format(route.verb.value, path, queryString)
+    "return _wA({method:\"%s\", url:%s%s})".format(route.verb.value,
+                                                   path,
+                                                   queryString)
   }
 
   /**
diff --git a/repos/playframework/framework/src/run-support/src/main/scala/play/runsupport/FileWatchService.scala b/repos/playframework/framework/src/run-support/src/main/scala/play/runsupport/FileWatchService.scala
index 57c9da8e80c..d8ef960e951 100644
--- a/repos/playframework/framework/src/run-support/src/main/scala/play/runsupport/FileWatchService.scala
+++ b/repos/playframework/framework/src/run-support/src/main/scala/play/runsupport/FileWatchService.scala
@@ -447,8 +447,11 @@ private[runsupport] object GlobalStaticVar {
    */
  def get[T](name: String)(implicit ct: ClassTag[T]): Option[T] = {
     try {
-      val value = ManagementFactory.getPlatformMBeanServer
-        .invoke(objectName(name), "get", Array.empty, Array.empty)
+      val value = ManagementFactory.getPlatformMBeanServer.invoke(
+        objectName(name),
+        "get",
+        Array.empty,
+        Array.empty)
       if (ct.runtimeClass.isInstance(value)) {
         Some(value.asInstanceOf[T])
       } else {
diff --git a/repos/playframework/framework/src/run-support/src/main/scala/play/runsupport/Reloader.scala b/repos/playframework/framework/src/run-support/src/main/scala/play/runsupport/Reloader.scala
index 78c83a61dd6..4ce7a9bb082 100644
--- a/repos/playframework/framework/src/run-support/src/main/scala/play/runsupport/Reloader.scala
+++ b/repos/playframework/framework/src/run-support/src/main/scala/play/runsupport/Reloader.scala
@@ -275,8 +275,10 @@ object Reloader {
       docsLoader.loadClass("play.docs.BuildDocHandlerFactory")
     val buildDocHandler = maybeDocsJarFile match {
       case Some(docsJarFile) =>
-        val factoryMethod = docHandlerFactoryClass
-          .getMethod("fromJar", classOf[JarFile], classOf[String])
+        val factoryMethod = docHandlerFactoryClass.getMethod(
+          "fromJar",
+          classOf[JarFile],
+          classOf[String])
         factoryMethod
           .invoke(null, docsJarFile, "play/docs/content")
          .asInstanceOf[BuildDocHandler]
diff --git a/repos/playframework/framework/src/run-support/src/test/scala/play/runsupport/FilterArgsSpec.scala b/repos/playframework/framework/src/run-support/src/test/scala/play/runsupport/FilterArgsSpec.scala
index 8517ca6a8e7..4c14cd3df2d 100644
--- a/repos/playframework/framework/src/run-support/src/test/scala/play/runsupport/FilterArgsSpec.scala
+++ b/repos/playframework/framework/src/run-support/src/test/scala/play/runsupport/FilterArgsSpec.scala
@@ -18,8 +18,10 @@ object FilterArgsSpec extends Specification {
       httpAddress: String = defaultHttpAddress,
       devSettings: Seq[(String, String)] = Seq.empty): Result = {
 
-    val result = Reloader
-      .filterArgs(args, defaultHttpPort, defaultHttpAddress, devSettings)
+    val result = Reloader.filterArgs(args,
+                                     defaultHttpPort,
+                                     defaultHttpAddress,
+                                     devSettings)
     result must_== ((properties, httpPort, httpsPort, httpAddress))
   }
 
diff --git a/repos/playframework/framework/src/sbt-plugin/src/main/scala/play/sbt/routes/RoutesCompiler.scala b/repos/playframework/framework/src/sbt-plugin/src/main/scala/play/sbt/routes/RoutesCompiler.scala
index 15a3eac6245..e6fa746fd3a 100644
--- a/repos/playframework/framework/src/sbt-plugin/src/main/scala/play/sbt/routes/RoutesCompiler.scala
+++ b/repos/playframework/framework/src/sbt-plugin/src/main/scala/play/sbt/routes/RoutesCompiler.scala
@@ -151,8 +151,9 @@ object RoutesCompiler extends AutoPlugin {
     val (products, errors) =
       syncIncremental(cacheDirectory, ops) { opsToRun: Seq[RoutesCompilerOp] =>
         val results = opsToRun.map { op =>
-          op -> play.routes.compiler.RoutesCompiler
-            .compile(op.task, generator, generatedDir)
+          op -> play.routes.compiler.RoutesCompiler.compile(op.task,
+                                                            generator,
+                                                            generatedDir)
         }
         val opResults = results.map {
           case (op, Right(inputs)) =>
diff --git a/repos/saddle/saddle-core/src/main/scala/org/saddle/Mat.scala b/repos/saddle/saddle-core/src/main/scala/org/saddle/Mat.scala
index 5dbb7708a20..2446ef4e757 100644
--- a/repos/saddle/saddle-core/src/main/scala/org/saddle/Mat.scala
+++ b/repos/saddle/saddle-core/src/main/scala/org/saddle/Mat.scala
@@ -363,8 +363,10 @@ trait Mat[@spec(Boolean, Int, Long, Double) A]
    */
  def mult[B](m: Mat[B])(implicit evA: NUM[A], evB: NUM[B]): Mat[Double] = {
     if (numCols != m.numRows) {
-      val errMsg = "Cannot multiply (%d %d) x (%d %d)"
-        .format(numRows, numCols, m.numRows, m.numCols)
+      val errMsg = "Cannot multiply (%d %d) x (%d %d)".format(numRows,
+                                                              numCols,
+                                                              m.numRows,
+                                                              m.numCols)
       throw new IllegalArgumentException(errMsg)
     }
 
diff --git a/repos/saddle/saddle-core/src/main/scala/org/saddle/array/Sorter.scala b/repos/saddle/saddle-core/src/main/scala/org/saddle/array/Sorter.scala
index a5080f17bcf..507a933eb4d 100644
--- a/repos/saddle/saddle-core/src/main/scala/org/saddle/array/Sorter.scala
+++ b/repos/saddle/saddle-core/src/main/scala/org/saddle/array/Sorter.scala
@@ -129,8 +129,9 @@ object Sorter {
  object timeSorter extends Sorter[DateTime] {
     def argSorted(arr: Array[DateTime]) = {
       val res = range(0, arr.length)
-      LongArrays
-        .radixSortIndirect(res, ScalarTagTime.time2LongArray(arr), true)
+      LongArrays.radixSortIndirect(res,
+                                   ScalarTagTime.time2LongArray(arr),
+                                   true)
       res
     }
 
diff --git a/repos/saddle/saddle-core/src/main/scala/org/saddle/io/CsvImplicits.scala b/repos/saddle/saddle-core/src/main/scala/org/saddle/io/CsvImplicits.scala
index f7cfc1641e4..51ece4d9ae6 100644
--- a/repos/saddle/saddle-core/src/main/scala/org/saddle/io/CsvImplicits.scala
+++ b/repos/saddle/saddle-core/src/main/scala/org/saddle/io/CsvImplicits.scala
@@ -54,16 +54,20 @@ object CsvImplicits {
         withColIx: Boolean = false,
         withRowIx: Boolean = true,
         settings: CsvSettings = new CsvSettings()) {
-      frame2CsvWriter(Frame(series))
-        .writeCsvFile(path, withColIx, withRowIx, settings)
+      frame2CsvWriter(Frame(series)).writeCsvFile(path,
+                                                  withColIx,
+                                                  withRowIx,
+                                                  settings)
     }
 
     def writeCsvStream(stream: OutputStream,
                        withColIx: Boolean = false,
                        withRowIx: Boolean = true,
                        settings: CsvSettings = new CsvSettings()) {
-      frame2CsvWriter(Frame(series))
-        .writeCsvStream(stream, withColIx, withRowIx, settings)
+      frame2CsvWriter(Frame(series)).writeCsvStream(stream,
+                                                    withColIx,
+                                                    withRowIx,
+                                                    settings)
     }
   } // end new
 
diff --git a/repos/saddle/saddle-core/src/main/scala/org/saddle/time/RRule.scala b/repos/saddle/saddle-core/src/main/scala/org/saddle/time/RRule.scala
index fe6db0ada84..91f278e5c74 100644
--- a/repos/saddle/saddle-core/src/main/scala/org/saddle/time/RRule.scala
+++ b/repos/saddle/saddle-core/src/main/scala/org/saddle/time/RRule.scala
@@ -279,22 +279,28 @@ case class RRule private (
    * provided DateTime instance.
    */
  def from(dt: DateTime): Iterator[DateTime] = {
-    val riter = RecurrenceIteratorFactory
-      .createRecurrenceIterator(toICal, dt2dtv(dt), inzone.toTimeZone)
+    val riter = RecurrenceIteratorFactory.createRecurrenceIterator(
+      toICal,
+      dt2dtv(dt),
+      inzone.toTimeZone)
 
     val iterWithJoins = joins.foldLeft(riter) {
       case (i1, (rrule, t)) =>
         val tmpfrom = t.map { dt2dtv } getOrElse dt2dtv(dt)
-        val tmpiter = RecurrenceIteratorFactory
-          .createRecurrenceIterator(rrule.toICal, tmpfrom, inzone.toTimeZone)
+        val tmpiter =
+          RecurrenceIteratorFactory.createRecurrenceIterator(rrule.toICal,
+                                                             tmpfrom,
+                                                             inzone.toTimeZone)
         RecurrenceIteratorFactory.join(i1, tmpiter)
     }
 
     val iterWithJoinsWithExcepts = excepts.foldLeft(iterWithJoins) {
       case (i1, (rrule, t)) =>
         val tmpfrom = t.map { dt2dtv } getOrElse dt2dtv(dt)
-        val tmpiter = RecurrenceIteratorFactory
-          .createRecurrenceIterator(rrule.toICal, tmpfrom, inzone.toTimeZone)
+        val tmpiter =
+          RecurrenceIteratorFactory.createRecurrenceIterator(rrule.toICal,
+                                                             tmpfrom,
+                                                             inzone.toTimeZone)
         RecurrenceIteratorFactory.except(i1, tmpiter)
     }
 
diff --git a/repos/saddle/saddle-hdf5/src/test/scala/org/saddle/io/H5StoreSpec.scala b/repos/saddle/saddle-hdf5/src/test/scala/org/saddle/io/H5StoreSpec.scala
index e01a15cbcb0..7a0e5b97757 100644
--- a/repos/saddle/saddle-hdf5/src/test/scala/org/saddle/io/H5StoreSpec.scala
+++ b/repos/saddle/saddle-hdf5/src/test/scala/org/saddle/io/H5StoreSpec.scala
@@ -282,8 +282,8 @@ class H5StoreSpec extends Specification {
       H5Store.writeFrame(tmp, "df1", df1)
       H5Store.readFrame[DateTime, Int, Double](tmp, "df1") must_== df1
 
-      H5Store
-        .writeFrame(tmp, "df1", df2) must throwAn[H5Store.H5StoreException]
+      H5Store.writeFrame(tmp, "df1", df2) must throwAn[
+        H5Store.H5StoreException]
       H5Store.readFrame[DateTime, Int, Double](tmp, "df1") must_== df1
 
       Files.deleteIfExists(Paths.get(tmp))
@@ -438,8 +438,7 @@ class H5StoreSpec extends Specification {
           H5Store.readFrame[DateTime, Int, Double](
             tmp,
             "f%s".format(100 + i)) must_== df2
-          H5Store
-            .readFrame[DateTime, Int, Double](tmp, "f%s".format(i)) must_== df1
+          H5Store.readFrame[DateTime, Int, Double](tmp, "f%s".format(i)) must_== df1
         }
       }
 
@@ -454,8 +453,7 @@ class H5StoreSpec extends Specification {
           if (i % 2 == 0)
             H5Store.writeFrame(tmp, "f%s".format(100 + i), df1)
           else
-            H5Store
-              .readFrame[DateTime, Int, Double](tmp, "f%s".format(i)) must_== df1
+            H5Store.readFrame[DateTime, Int, Double](tmp, "f%s".format(i)) must_== df1
         }
       }
 
diff --git a/repos/sbt/main/actions/src/main/scala/sbt/Compiler.scala b/repos/sbt/main/actions/src/main/scala/sbt/Compiler.scala
index f3eff5a4704..07c28792f0b 100644
--- a/repos/sbt/main/actions/src/main/scala/sbt/Compiler.scala
+++ b/repos/sbt/main/actions/src/main/scala/sbt/Compiler.scala
@@ -170,8 +170,9 @@ object Compiler {
                              app.provider.components,
                              Option(launcher.ivyHome),
                              log)
-    val provider = ComponentCompiler
-      .interfaceProvider(componentManager, ivyConfiguration, sourcesModule)
+    val provider = ComponentCompiler.interfaceProvider(componentManager,
+                                                       ivyConfiguration,
+                                                       sourcesModule)
     new AnalyzingCompiler(instance, provider, cpOptions)
   }
 
diff --git a/repos/sbt/main/actions/src/main/scala/sbt/DotGraph.scala b/repos/sbt/main/actions/src/main/scala/sbt/DotGraph.scala
index 9f6d206a462..82e47a56a79 100644
--- a/repos/sbt/main/actions/src/main/scala/sbt/DotGraph.scala
+++ b/repos/sbt/main/actions/src/main/scala/sbt/DotGraph.scala
@@ -59,8 +59,7 @@ object DotGraph {
     val mappedGraph = new HashMap[String, HashSet[String]]
     for ((key, values) <- relation.forwardMap; keyString =
keyToString(key); value <- values) - mappedGraph - .getOrElseUpdate(keyString, new HashSet[String]) += valueToString( + mappedGraph.getOrElseUpdate(keyString, new HashSet[String]) += valueToString( value) val mappings = for { diff --git a/repos/sbt/main/actions/src/main/scala/sbt/Tests.scala b/repos/sbt/main/actions/src/main/scala/sbt/Tests.scala index fa6722108b2..e4b2ab57fa9 100644 --- a/repos/sbt/main/actions/src/main/scala/sbt/Tests.scala +++ b/repos/sbt/main/actions/src/main/scala/sbt/Tests.scala @@ -237,8 +237,12 @@ object Tests { } val (frameworkSetup, runnables, frameworkCleanup) = - TestFramework - .testTasks(frameworks, runners, loader, tests, log, testListeners) + TestFramework.testTasks(frameworks, + runners, + loader, + tests, + log, + testListeners) val setupTasks = fj(partApp(userSetup) :+ frameworkSetup) val mainTasks = diff --git a/repos/sbt/main/command/src/main/scala/sbt/Command.scala b/repos/sbt/main/command/src/main/scala/sbt/Command.scala index 5b482742c11..35cf8657ba0 100644 --- a/repos/sbt/main/command/src/main/scala/sbt/Command.scala +++ b/repos/sbt/main/command/src/main/scala/sbt/Command.scala @@ -117,8 +117,7 @@ object Command { sc: SimpleCommand): State => Parser[() => State] = { def usageError = s"${sc.name} usage:" + Help.message(sc.help0, None) s => - (Parser - .softFailure(usageError, definitive = true): Parser[() => State]) | sc + (Parser.softFailure(usageError, definitive = true): Parser[() => State]) | sc .parser(s) } diff --git a/repos/sbt/main/settings/src/main/scala/sbt/std/SettingMacro.scala b/repos/sbt/main/settings/src/main/scala/sbt/std/SettingMacro.scala index dfc2ea0645e..d88d87f3e23 100644 --- a/repos/sbt/main/settings/src/main/scala/sbt/std/SettingMacro.scala +++ b/repos/sbt/main/settings/src/main/scala/sbt/std/SettingMacro.scala @@ -37,8 +37,8 @@ object InitializeConvert extends Convert { nme == InputWrapper.WrapInitTaskName) Converted.Failure(in.pos, "A setting cannot depend on a task") else if (nme == InputWrapper.WrapPreviousName) - Converted - .Failure(in.pos, "A setting cannot depend on a task's previous value.") + Converted.Failure(in.pos, + "A setting cannot depend on a task's previous value.") else Converted.NotApplicable } diff --git a/repos/sbt/main/src/main/scala/sbt/Aggregation.scala b/repos/sbt/main/src/main/scala/sbt/Aggregation.scala index 374046cc2ef..44158725faf 100644 --- a/repos/sbt/main/src/main/scala/sbt/Aggregation.scala +++ b/repos/sbt/main/src/main/scala/sbt/Aggregation.scala @@ -270,8 +270,7 @@ final object Aggregation { } def aggregationEnabled(key: ScopedKey[_], data: Settings[Scope]): Boolean = - Keys.aggregate in Scope - .fillTaskAxis(key.scope, key.key) get data getOrElse true + Keys.aggregate in Scope.fillTaskAxis(key.scope, key.key) get data getOrElse true @deprecated("Use BuildUtil.aggregationRelation", "0.13.0") def relation( diff --git a/repos/sbt/main/src/main/scala/sbt/Extracted.scala b/repos/sbt/main/src/main/scala/sbt/Extracted.scala index 57f5ef82662..68216ba2b92 100644 --- a/repos/sbt/main/src/main/scala/sbt/Extracted.scala +++ b/repos/sbt/main/src/main/scala/sbt/Extracted.scala @@ -81,8 +81,11 @@ final case class Extracted( val config = extractedTaskConfig(this, structure, state) withStreams(structure, state) { str => val nv = nodeView(state, str, rkey :: Nil) - val (newS, result) = EvaluateTask - .runTask(task, state, str, structure.index.triggers, config)(nv) + val (newS, result) = EvaluateTask.runTask(task, + state, + str, + structure.index.triggers, + config)(nv) (newS, processResult(result, 
newS.log)) } } diff --git a/repos/sbt/main/src/main/scala/sbt/GlobalPlugin.scala b/repos/sbt/main/src/main/scala/sbt/GlobalPlugin.scala index 496cba82d59..bdababbd636 100644 --- a/repos/sbt/main/src/main/scala/sbt/GlobalPlugin.scala +++ b/repos/sbt/main/src/main/scala/sbt/GlobalPlugin.scala @@ -95,8 +95,9 @@ object GlobalPlugin { import EvaluateTask._ withStreams(structure, state) { str => val nv = nodeView(state, str, roots) - val config = EvaluateTask - .extractedTaskConfig(Project.extract(state), structure, state) + val config = EvaluateTask.extractedTaskConfig(Project.extract(state), + structure, + state) val (newS, result) = runTask(t, state, str, structure.index.triggers, config)(nv) (newS, processResult(result, newS.log)) diff --git a/repos/sbt/main/src/main/scala/sbt/Load.scala b/repos/sbt/main/src/main/scala/sbt/Load.scala index 409bd0fe613..e8d946fc3da 100755 --- a/repos/sbt/main/src/main/scala/sbt/Load.scala +++ b/repos/sbt/main/src/main/scala/sbt/Load.scala @@ -398,8 +398,8 @@ object Load { uri: URI, rootProject: URI => String, settings: Seq[Setting[_]]): Seq[Setting[_]] = - Project - .transform(Scope.resolveScope(thisScope, uri, rootProject), settings) + Project.transform(Scope.resolveScope(thisScope, uri, rootProject), + settings) def projectScope(project: Reference): Scope = Scope(Select(project), Global, Global, Global) @@ -468,8 +468,9 @@ object Load { val resolver = (info: BuildLoader.ResolveInfo) => RetrieveUnit(info) val build = (info: BuildLoader.BuildInfo) => Some(() => loadUnit(info.uri, info.base, info.state, info.config)) - val components = BuildLoader - .components(resolver, build, full = BuildLoader.componentLoader) + val components = BuildLoader.components(resolver, + build, + full = BuildLoader.componentLoader) BuildLoader(components, fail, s, config) } def load(file: File, loaders: BuildLoader, extra: List[URI]): sbt.PartBuild = @@ -1263,8 +1264,9 @@ object Load { @deprecated("Use PluginDiscovery.binaryModuleNames.", "0.13.2") def binaryPlugins(classpath: Seq[File], loader: ClassLoader): Seq[String] = - PluginDiscovery - .binaryModuleNames(classpath, loader, PluginDiscovery.Paths.Plugins) + PluginDiscovery.binaryModuleNames(classpath, + loader, + PluginDiscovery.Paths.Plugins) @deprecated("Use PluginDiscovery.onClasspath", "0.13.2") def onClasspath(classpath: Seq[File])(url: URL): Boolean = diff --git a/repos/sbt/main/src/main/scala/sbt/Main.scala b/repos/sbt/main/src/main/scala/sbt/Main.scala index 9e97591ba48..48e41e26cfa 100644 --- a/repos/sbt/main/src/main/scala/sbt/Main.scala +++ b/repos/sbt/main/src/main/scala/sbt/Main.scala @@ -441,8 +441,10 @@ object BuiltinCommands { (s: State) => { val extracted = Project.extract(s) import extracted._ - token(Space ~> flag("every" ~ Space)) ~ SettingCompletions - .settingParser(structure.data, structure.index.keyMap, currentProject) + token(Space ~> flag("every" ~ Space)) ~ SettingCompletions.settingParser( + structure.data, + structure.index.keyMap, + currentProject) } @deprecated("Use Inspect.parser", "0.13.0") diff --git a/repos/sbt/main/src/main/scala/sbt/Resolve.scala b/repos/sbt/main/src/main/scala/sbt/Resolve.scala index 6c121e8552f..71248846fb2 100644 --- a/repos/sbt/main/src/main/scala/sbt/Resolve.scala +++ b/repos/sbt/main/src/main/scala/sbt/Resolve.scala @@ -42,8 +42,7 @@ object Resolve { val task = scope.task.toOption val keyIndex = index.keyIndex val definesKey = (c: ScopeAxis[ConfigKey]) => - keyIndex - .keys(resolvedRef, c.toOption.map(_.name), task) contains key.label + keyIndex.keys(resolvedRef, 
c.toOption.map(_.name), task) contains key.label val projectConfigs = index.configurations(proj).map(ck => Select(ck)) val config: ScopeAxis[ConfigKey] = (Global +: projectConfigs) find definesKey getOrElse Global diff --git a/repos/sbt/main/src/main/scala/sbt/SettingCompletions.scala b/repos/sbt/main/src/main/scala/sbt/SettingCompletions.scala index dd9dd6ce357..1a41a031bc1 100644 --- a/repos/sbt/main/src/main/scala/sbt/SettingCompletions.scala +++ b/repos/sbt/main/src/main/scala/sbt/SettingCompletions.scala @@ -126,8 +126,10 @@ private[sbt] object SettingCompletions { val (used, trimU) = lines(strings(affected)) val details = if (trimR || trimU) "\n\tRun `last` for details." else "" val valuesString = if (redefined.size == 1) "value" else "values" - "Defining %s\nThe new %s will be used by %s%s" - .format(redef, valuesString, used, details) + "Defining %s\nThe new %s will be used by %s%s".format(redef, + valuesString, + used, + details) } } @@ -274,8 +276,7 @@ private[sbt] object SettingCompletions { completeAssign(seen, level, key).toSet } val identifier = - Act - .filterStrings(Op, Assign.values.map(_.toString), "assignment method") map Assign.withName + Act.filterStrings(Op, Assign.values.map(_.toString), "assignment method") map Assign.withName token(Space) ~> token(optionallyQuoted(identifier), completions) } diff --git a/repos/sbt/sbt-maven-resolver/src/main/scala/sbt/mavenint/MavenRepositoryResolver.scala b/repos/sbt/sbt-maven-resolver/src/main/scala/sbt/mavenint/MavenRepositoryResolver.scala index 79ffc2aa68c..b0ae22ecc7a 100644 --- a/repos/sbt/sbt-maven-resolver/src/main/scala/sbt/mavenint/MavenRepositoryResolver.scala +++ b/repos/sbt/sbt-maven-resolver/src/main/scala/sbt/mavenint/MavenRepositoryResolver.scala @@ -237,8 +237,9 @@ abstract class MavenRepositoryResolver(settings: IvySettings) } // Here we need to pretend we downloaded the pom.xml file - val pom = DefaultArtifact - .newPomArtifact(drid, new java.util.Date(lastModifiedTime)) + val pom = DefaultArtifact.newPomArtifact( + drid, + new java.util.Date(lastModifiedTime)) val madr = new MetadataArtifactDownloadReport(pom) madr.setSearched(true) madr.setDownloadStatus(DownloadStatus.SUCCESSFUL) // TODO - Figure this things out for this report. diff --git a/repos/sbt/sbt-maven-resolver/src/main/scala/sbt/mavenint/MavenRepositorySystemFactory.scala b/repos/sbt/sbt-maven-resolver/src/main/scala/sbt/mavenint/MavenRepositorySystemFactory.scala index 28904ae760f..59721325778 100644 --- a/repos/sbt/sbt-maven-resolver/src/main/scala/sbt/mavenint/MavenRepositorySystemFactory.scala +++ b/repos/sbt/sbt-maven-resolver/src/main/scala/sbt/mavenint/MavenRepositorySystemFactory.scala @@ -39,8 +39,8 @@ object MavenRepositorySystemFactory { } }) // Here we register the Ivy <-> Aether transport bridge - locator - .addService(classOf[TransporterFactory], classOf[MyTransportFactory]) + locator.addService(classOf[TransporterFactory], + classOf[MyTransportFactory]) // This connects the download mechanism to our transports. Why is it needed? no clue. 
    locator.addService(classOf[RepositoryConnectorFactory],
                       classOf[BasicRepositoryConnectorFactory])
 
diff --git a/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/GenJSCode.scala b/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/GenJSCode.scala
index d429436e90f..1c4c67c228a 100644
--- a/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/GenJSCode.scala
+++ b/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/GenJSCode.scala
@@ -3752,8 +3752,8 @@ abstract class GenJSCode
 
    def requireNotSuper(): Unit = {
      if (superIn.isDefined) {
-        reporter
-          .error(pos, "Illegal super call in Scala.js-defined JS class")
+        reporter.error(pos,
+                       "Illegal super call in Scala.js-defined JS class")
      }
    }
 
diff --git a/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/GenJSExports.scala b/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/GenJSExports.scala
index 5cb4a87882f..41f2f781dc7 100644
--- a/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/GenJSExports.scala
+++ b/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/GenJSExports.scala
@@ -148,8 +148,8 @@ trait GenJSExports extends SubComponent { self: GenJSCode =>
      implicit pos: Position) = {
 
    if (hasRepeatedParam(trgSym)) {
-      reporter
-        .error(pos, "You may not name-export a method with a *-parameter")
+      reporter.error(pos,
+                     "You may not name-export a method with a *-parameter")
    }
 
    val jsArgs = for {
diff --git a/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/PreTyperComponent.scala b/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/PreTyperComponent.scala
index 5060fc9c389..ee71dc20dd2 100644
--- a/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/PreTyperComponent.scala
+++ b/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/PreTyperComponent.scala
@@ -96,8 +96,10 @@ abstract class PreTyperComponent
        case member => transform(member)
      }
 
-      val newImpl = treeCopy
-        .Template(tree.impl, tree.impl.parents, tree.impl.self, newBody)
+      val newImpl = treeCopy.Template(tree.impl,
+                                      tree.impl.parents,
+                                      tree.impl.self,
+                                      newBody)
      treeCopy.ClassDef(tree, tree.mods, tree.name, tree.tparams, newImpl)
 
    case tree: Template =>
diff --git a/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/PrepJSInterop.scala b/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/PrepJSInterop.scala
index 907c4a9a74d..a5cb8768464 100644
--- a/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/PrepJSInterop.scala
+++ b/repos/scala-js/compiler/src/main/scala/org/scalajs/core/compiler/PrepJSInterop.scala
@@ -394,8 +394,8 @@ abstract class PrepJSInterop
          }
        }
      } else {
-        reporter
-          .error(tpeArg.pos, s"non-trait class type required but $tpe found")
+        reporter.error(tpeArg.pos,
+                       s"non-trait class type required but $tpe found")
        EmptyTree
      }
    } else {
@@ -591,8 +591,8 @@ abstract class PrepJSInterop
      // Check that only native objects extend js.GlobalScope
      if (isJSGlobalScope(implDef) && implDef.symbol != JSGlobalScopeClass &&
          (!sym.isModuleClass || !isJSNative)) {
-        reporter
-          .error(implDef.pos, "Only native objects may extend js.GlobalScope")
+        reporter.error(implDef.pos,
+                       "Only native objects may extend js.GlobalScope")
      }
 
      if (shouldPrepareExports) {
@@ -608,8 +608,8 @@ abstract class PrepJSInterop
        for {
          exp <- exportsOf(sym)
          if !exp.ignoreInvalid
        } {
-          reporter
-            .error(exp.pos, "You may not export a native JS class or object")
+          reporter.error(exp.pos,
+                         "You may not export a native JS class or object")
        }
      } else {
        if (sym.isModuleClass) registerModuleExports(sym)
@@ -923,8 +923,8 @@ abstract class PrepJSInterop
      annot <- sym.getAnnotation(JSNameAnnotation)
      if annot.stringArg(0).isEmpty
    } {
-      reporter
-        .error(annot.pos, "The argument to JSName must be a literal string")
+      reporter.error(annot.pos,
+                     "The argument to JSName must be a literal string")
    }
  }
 
diff --git a/repos/scala-js/javalib/src/main/scala/java/math/BigInteger.scala b/repos/scala-js/javalib/src/main/scala/java/math/BigInteger.scala
index b9855dac4d8..fe129e16c13 100644
--- a/repos/scala-js/javalib/src/main/scala/java/math/BigInteger.scala
+++ b/repos/scala-js/javalib/src/main/scala/java/math/BigInteger.scala
@@ -376,8 +376,10 @@ class BigInteger extends Number with Comparable[BigInteger] {
    val resDigits = new Array[Int](resLength)
    val resSign = if (thisSign == divisorSign) 1 else -1
    if (divisorLen == 1) {
-      Division
-        .divideArrayByInt(resDigits, digits, thisLen, divisor.digits(0))
+      Division.divideArrayByInt(resDigits,
+                                digits,
+                                thisLen,
+                                divisor.digits(0))
    } else {
      Division.divide(resDigits,
                      resLength,
@@ -642,8 +644,12 @@ class BigInteger extends Number with Comparable[BigInteger] {
      Division.remainderArrayByInt(digits, thisLen, divisor.digits(0))
    } else {
      val qLen = thisLen - divisorLen + 1
-      resDigits = Division
-        .divide(null, qLen, digits, thisLen, divisor.digits, divisorLen)
+      resDigits = Division.divide(null,
+                                  qLen,
+                                  digits,
+                                  thisLen,
+                                  divisor.digits,
+                                  divisorLen)
    }
    val result = new BigInteger(sign, resLength, resDigits)
    result.cutOffLeadingZeroes()
diff --git a/repos/scala-js/javalib/src/main/scala/java/math/Elementary.scala b/repos/scala-js/javalib/src/main/scala/java/math/Elementary.scala
index 36aac570366..cfdf1680859 100644
--- a/repos/scala-js/javalib/src/main/scala/java/math/Elementary.scala
+++ b/repos/scala-js/javalib/src/main/scala/java/math/Elementary.scala
@@ -230,8 +230,9 @@ private[math] object Elementary {
    */
  def inplaceAdd(op1: BigInteger, op2: BigInteger): Unit = {
    add(op1.digits, op1.digits, op1.numberLength, op2.digits, op2.numberLength)
-    op1.numberLength = Math
-      .min(Math.max(op1.numberLength, op2.numberLength) + 1, op1.digits.length)
+    op1.numberLength = Math.min(
+      Math.max(op1.numberLength, op2.numberLength) + 1,
+      op1.digits.length)
    op1.cutOffLeadingZeroes()
    op1.unCache()
  }
diff --git a/repos/scala-js/javalib/src/main/scala/java/nio/GenHeapBuffer.scala b/repos/scala-js/javalib/src/main/scala/java/nio/GenHeapBuffer.scala
index 5c7797d259a..82dadbc1e79 100644
--- a/repos/scala-js/javalib/src/main/scala/java/nio/GenHeapBuffer.scala
+++ b/repos/scala-js/javalib/src/main/scala/java/nio/GenHeapBuffer.scala
@@ -82,8 +82,11 @@ private[nio] final class GenHeapBuffer[B <: Buffer](val self: B)
    ensureNotReadOnly()
 
    val len = remaining
-    System
-      .arraycopy(_array, _arrayOffset + position, _array, _arrayOffset, len)
+    System.arraycopy(_array,
+                     _arrayOffset + position,
+                     _array,
+                     _arrayOffset,
+                     len)
    _mark = -1
    limit(capacity)
    position(len)
diff --git a/repos/scala-js/javalib/src/main/scala/java/nio/charset/CoderResult.scala b/repos/scala-js/javalib/src/main/scala/java/nio/charset/CoderResult.scala
index 0281d444686..9329e295122 100644
--- a/repos/scala-js/javalib/src/main/scala/java/nio/charset/CoderResult.scala
+++ b/repos/scala-js/javalib/src/main/scala/java/nio/charset/CoderResult.scala
@@ -75,6 +75,6 @@ object CoderResult {
  }
 
  private def unmappableForLengthImpl(length: Int): CoderResult =
-    uniqueUnmappable
-      .getOrElseUpdate(length, new CoderResult(Unmappable, length))
+    uniqueUnmappable.getOrElseUpdate(length,
+                                     new CoderResult(Unmappable, length))
 }
diff --git a/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitAsyncJSEnv.scala b/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitAsyncJSEnv.scala
index 4e4e2170645..02dd1b62393 100644
--- a/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitAsyncJSEnv.scala
+++ b/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitAsyncJSEnv.scala
@@ -33,8 +33,10 @@ trait LinkingUnitAsyncJSEnv extends LinkingUnitJSEnv with AsyncJSEnv {
        linkingUnit: LinkingUnit,
        postLibs: Seq[ResolvedJSDependency],
        code: VirtualJSFile): AsyncJSRunner = {
-      LinkingUnitAsyncJSEnv.this
-        .asyncRunner(loadedLibs ++ preLibs, linkingUnit, postLibs, code)
+      LinkingUnitAsyncJSEnv.this.asyncRunner(loadedLibs ++ preLibs,
+                                             linkingUnit,
+                                             postLibs,
+                                             code)
    }
  }
 
diff --git a/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitComJSEnv.scala b/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitComJSEnv.scala
index 1e23eac1b1f..0441b512cfd 100644
--- a/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitComJSEnv.scala
+++ b/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitComJSEnv.scala
@@ -32,8 +32,10 @@ trait LinkingUnitComJSEnv extends LinkingUnitAsyncJSEnv with ComJSEnv {
        linkingUnit: LinkingUnit,
        postLibs: Seq[ResolvedJSDependency],
        code: VirtualJSFile): ComJSRunner = {
-      LinkingUnitComJSEnv.this
-        .comRunner(loadedLibs ++ preLibs, linkingUnit, postLibs, code)
+      LinkingUnitComJSEnv.this.comRunner(loadedLibs ++ preLibs,
+                                         linkingUnit,
+                                         postLibs,
+                                         code)
    }
  }
 
diff --git a/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitJSEnv.scala b/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitJSEnv.scala
index 2ccc8100d53..b95cac920c7 100644
--- a/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitJSEnv.scala
+++ b/repos/scala-js/js-envs/src/main/scala/org/scalajs/jsenv/LinkingUnitJSEnv.scala
@@ -61,8 +61,10 @@ trait LinkingUnitJSEnv extends JSEnv {
        linkingUnit: LinkingUnit,
        postLibs: Seq[ResolvedJSDependency],
        code: VirtualJSFile): JSRunner = {
-      LinkingUnitJSEnv.this
-        .jsRunner(loadedLibs ++ preLibs, linkingUnit, postLibs, code)
+      LinkingUnitJSEnv.this.jsRunner(loadedLibs ++ preLibs,
+                                     linkingUnit,
+                                     postLibs,
+                                     code)
    }
  }
 
diff --git a/repos/scala-js/library/src/main/scala/scala/scalajs/js/typedarray/DataViewExt.scala b/repos/scala-js/library/src/main/scala/scala/scalajs/js/typedarray/DataViewExt.scala
index b24ad1064e3..3f8f511a7da 100644
--- a/repos/scala-js/library/src/main/scala/scala/scalajs/js/typedarray/DataViewExt.scala
+++ b/repos/scala-js/library/src/main/scala/scala/scalajs/js/typedarray/DataViewExt.scala
@@ -36,10 +36,12 @@ object DataViewExt {
      littleEndian: Boolean = false): Unit = {
    val high = (value >>> 32).toInt
    val low = value.toInt
-    dataView
-      .setInt32(index + (if (littleEndian) 4 else 0), high, littleEndian)
-    dataView
-      .setInt32(index + (if (littleEndian) 0 else 4), low, littleEndian)
+    dataView.setInt32(index + (if (littleEndian) 4 else 0),
+                      high,
+                      littleEndian)
+    dataView.setInt32(index + (if (littleEndian) 0 else 4),
+                      low,
+                      littleEndian)
  }
 }
 }
diff --git a/repos/scala-js/partest/src/main/scala/scala/tools/partest/scalajs/ScalaJSPartest.scala b/repos/scala-js/partest/src/main/scala/scala/tools/partest/scalajs/ScalaJSPartest.scala
index a9ce2a33733..dbf74d6b59a 100644
--- a/repos/scala-js/partest/src/main/scala/scala/tools/partest/scalajs/ScalaJSPartest.scala
+++ b/repos/scala-js/partest/src/main/scala/scala/tools/partest/scalajs/ScalaJSPartest.scala
@@ -26,8 +26,9 @@ trait ScalaJSDirectCompiler extends DirectCompiler {
      reporter: Reporter): PartestGlobal = {
    new PartestGlobal(settings, reporter) {
      override protected def loadRoughPluginsList(): List[Plugin] = {
-        (super.loadRoughPluginsList() :+ Plugin
-          .instantiate(classOf[ScalaJSPlugin], this))
+        (super.loadRoughPluginsList() :+ Plugin.instantiate(
+          classOf[ScalaJSPlugin],
+          this))
      }
    }
  }
diff --git a/repos/scala-js/tools/shared/src/main/scala/org/scalajs/core/tools/linker/backend/emitter/JSDesugaring.scala b/repos/scala-js/tools/shared/src/main/scala/org/scalajs/core/tools/linker/backend/emitter/JSDesugaring.scala
index 5b274df76cd..4126b3ec40f 100644
--- a/repos/scala-js/tools/shared/src/main/scala/org/scalajs/core/tools/linker/backend/emitter/JSDesugaring.scala
+++ b/repos/scala-js/tools/shared/src/main/scala/org/scalajs/core/tools/linker/backend/emitter/JSDesugaring.scala
@@ -958,8 +958,7 @@ private[emitter] class JSDesugaring(internalOptions: InternalOptions) {
        val VarRef(ident) = lhs
        val elems = (rhs: @unchecked) match {
          case VarRef(rhsIdent) =>
-            for (RecordType
-              .Field(fName, fOrigName, fTpe, fMutable) <- fields)
+            for (RecordType.Field(fName, fOrigName, fTpe, fMutable) <- fields)
              yield VarRef(makeRecordFieldIdent(rhsIdent, fName, fOrigName))(
                fTpe)
 
diff --git a/repos/scala-js/tools/shared/src/main/scala/org/scalajs/core/tools/linker/frontend/optimizer/GenIncOptimizer.scala b/repos/scala-js/tools/shared/src/main/scala/org/scalajs/core/tools/linker/frontend/optimizer/GenIncOptimizer.scala
index 9f271a473bc..09cd6b091c6 100644
--- a/repos/scala-js/tools/shared/src/main/scala/org/scalajs/core/tools/linker/frontend/optimizer/GenIncOptimizer.scala
+++ b/repos/scala-js/tools/shared/src/main/scala/org/scalajs/core/tools/linker/frontend/optimizer/GenIncOptimizer.scala
@@ -253,8 +253,8 @@ abstract class GenIncOptimizer private[optimizer] (
    *
    *  Non-batch mode only.
    */
-    objectClass
-      .walkForChanges(CollOps.remove(neededClasses, _).get, Set.empty)
+    objectClass.walkForChanges(CollOps.remove(neededClasses, _).get,
+                               Set.empty)
  }
 
  /* Class additions:
diff --git a/repos/scala/src/build/genprod.scala b/repos/scala/src/build/genprod.scala
index 58d11f3b01a..3a604511f53 100644
--- a/repos/scala/src/build/genprod.scala
+++ b/repos/scala/src/build/genprod.scala
@@ -127,8 +127,10 @@ object FunctionOne extends Function(1) {
    "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) "
 
  override def descriptiveComment =
-    " " + functionNTemplate
-      .format("succ", "anonfun1", """
+    " " + functionNTemplate.format(
+      "succ",
+      "anonfun1",
+      """
 *    val succ = (x: Int) => x + 1
 *    val anonfun1 = new Function1[Int, Int] {
 *      def apply(x: Int): Int = x + 1
@@ -313,8 +315,9 @@ class Tuple(val i: Int) extends Group("Tuple") with Arity {
  private def idiomatic =
    if (i < 2) ""
    else
-      " Note that it is more idiomatic to create a %s via `(%s)`"
-        .format(className, constructorArgs)
+      " Note that it is more idiomatic to create a %s via `(%s)`".format(
+        className,
+        constructorArgs)
 
  private def params = (
diff --git a/repos/scala/src/compiler/scala/reflect/reify/Errors.scala b/repos/scala/src/compiler/scala/reflect/reify/Errors.scala
index 488529bf025..05264e64890 100644
--- a/repos/scala/src/compiler/scala/reflect/reify/Errors.scala
+++ b/repos/scala/src/compiler/scala/reflect/reify/Errors.scala
@@ -17,8 +17,9 @@ trait Errors { self: Reifier =>
 
  // hence we don't crash here, but nicely report a typechecking error and bail out asap
  def CannotReifyType(tpe: Type) = {
-    val msg = "implementation restriction: cannot reify type %s (%s)"
-      .format(tpe, tpe.kind)
+    val msg = "implementation restriction: cannot reify type %s (%s)".format(
+      tpe,
+      tpe.kind)
    throw new ReificationException(defaultErrorPosition, msg)
  }
 
diff --git a/repos/scala/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/repos/scala/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 7d002d2c438..94fdf266741 100644
--- a/repos/scala/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/repos/scala/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -116,8 +116,9 @@ trait GenUtils { self: Reifier =>
    var origin = ""
    if (sym.owner != NoSymbol) origin += "defined by %s".format(sym.owner.name)
    if (sym.pos != NoPosition)
-      origin += " in %s:%s:%s"
-        .format(sym.pos.source.file.name, sym.pos.line, sym.pos.column)
+      origin += " in %s:%s:%s".format(sym.pos.source.file.name,
+                                      sym.pos.line,
+                                      sym.pos.column)
    if (origin == "") origin = "of unknown origin"
    origin
  }
diff --git a/repos/scala/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/repos/scala/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index 6095582fd10..3ae53dc0905 100644
--- a/repos/scala/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/repos/scala/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -155,8 +155,10 @@ trait Metalevels { self: Reifier =>
          // if (settings.logRuntimeSplices.value) reporter.echo(tree.pos, "this splice cannot be resolved statically")
          // withinSplice { super.transform(tree) }
          if (reifyDebug)
-            println("metalevel breach in %s: %s"
-              .format(tree, (breaches map (_.symbol)).distinct mkString ", "))
+            println(
+              "metalevel breach in %s: %s".format(
+                tree,
+                (breaches map (_.symbol)).distinct mkString ", "))
          CannotReifyRuntimeSplice(tree)
        } else {
          withinSplice { super.transform(tree) }
diff --git a/repos/scala/src/compiler/scala/tools/ant/Scalac.scala b/repos/scala/src/compiler/scala/tools/ant/Scalac.scala
index 792df73da25..5f834631e93 100644
--- a/repos/scala/src/compiler/scala/tools/ant/Scalac.scala
+++ b/repos/scala/src/compiler/scala/tools/ant/Scalac.scala
@@ -608,8 +608,8 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
        if (javaFiles.isEmpty)
          "%d source file%s".format(list.length, plural(list))
        else
-          "%d scala and %d java source files"
-            .format(scalaFiles.length, javaFiles.length)
+          "%d scala and %d java source files".format(scalaFiles.length,
+                                                     javaFiles.length)
      log("Compiling %s to %s".format(str, getDestination.toString))
    } else log("No files selected for compilation", Project.MSG_VERBOSE)
 
diff --git a/repos/scala/src/compiler/scala/tools/nsc/Global.scala b/repos/scala/src/compiler/scala/tools/nsc/Global.scala
index 2496b403ddc..66b4af0b8f2 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/Global.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/Global.scala
@@ -1722,8 +1722,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
    }
 
    syms foreach { sym =>
-      val name = "\n<<-- %s %s after phase '%s' -->>"
-        .format(sym.kindString, sym.fullName, ph.name)
+      val name = "\n<<-- %s %s after phase '%s' -->>".format(sym.kindString,
+                                                             sym.fullName,
+                                                             ph.name)
      val baseClasses = bases(sym).mkString("Base classes:\n ", "\n ", "")
      val contents =
        if (declsOnly) decls(sym).mkString("Declarations:\n ", "\n ", "")
diff --git a/repos/scala/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/repos/scala/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 45a90e0dc8c..ed2b838ecaf 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -202,8 +202,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
      name: Name): (ValDef, () => Ident) = {
    val packedType = typer.packedType(expr, owner)
    val sym =
-      owner
-        .newValue(name.toTermName, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
+      owner.newValue(name.toTermName, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
    (ValDef(sym, expr), () => Ident(sym) setPos sym.pos.focus setType expr.tpe)
  }
 
diff --git a/repos/scala/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/repos/scala/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index dcf3b7b3472..a540c64290a 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -945,8 +945,10 @@ trait Parsers extends Scanners with MarkupParsers with ParsersCommon { self =>
 
    def finishBinaryOp(isExpr: Boolean, opinfo: OpInfo, rhs: Tree): Tree = {
      import opinfo._
-      val operatorPos: Position = Position
-        .range(rhs.pos.source, offset, offset, offset + operator.length)
+      val operatorPos: Position = Position.range(rhs.pos.source,
+                                                 offset,
+                                                 offset,
+                                                 offset + operator.length)
      val pos = lhs.pos union rhs.pos union operatorPos withPoint offset
 
      atPos(pos)(
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 6c46b7c163a..691886fbfa7 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -523,8 +523,8 @@ abstract class ScalaPrimitives {
 
  /** Return the code for the given symbol.
    */
  def getPrimitive(sym: Symbol): Int =
-    primitives
-      .getOrElse(sym, throw new AssertionError(s"Unknown primitive $sym"))
+    primitives.getOrElse(sym,
+                         throw new AssertionError(s"Unknown primitive $sym"))
 
  /**
   * Return the primitive code of the given operation. If the
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
index e041f3aa8df..c2ccbb811a5 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala
@@ -155,8 +155,9 @@ object AsmUtils {
    classNode.accept(cw)
    val sw = new StringWriter()
    val pw = new PrintWriter(sw)
-    CheckClassAdapter
-      .verify(new ClassReader(cw.toByteArray), dumpNonErroneous, pw)
+    CheckClassAdapter.verify(new ClassReader(cw.toByteArray),
+                             dumpNonErroneous,
+                             pw)
    val res = sw.toString
    if (res.isEmpty) None else Some(res)
  }
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
index 56b6b6910ef..3a32cd14bea 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
@@ -1095,8 +1095,8 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
 
      var index = 0
      for (jparamType <- paramJavaTypes) {
-        mirrorMethod
-          .visitVarInsn(jparamType.typedOpcode(asm.Opcodes.ILOAD), index)
+        mirrorMethod.visitVarInsn(jparamType.typedOpcode(asm.Opcodes.ILOAD),
+                                  index)
        assert(!jparamType.isInstanceOf[MethodBType], jparamType)
        index += jparamType.size
      }
@@ -1234,8 +1234,8 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters {
      )
 
      if (emitSource)
-      mirrorClass
-        .visitSource("" + cunit.source, null /* SourceDebugExtension */ )
+      mirrorClass.visitSource("" + cunit.source,
+                              null /* SourceDebugExtension */ )
 
      val ssa = getAnnotPickle(bType.internalName, moduleClass.companionSymbol)
      mirrorClass.visitAttribute(
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
index b7d4891192b..dcb53b9851c 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
@@ -536,8 +536,10 @@ abstract class BCodeIdiomatic extends SubComponent {
        i += 1
      }
      assert(oldPos == keys.length, "emitSWITCH")
-      jmethod
-        .visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*)
+      jmethod.visitTableSwitchInsn(keyMin,
+                                   keyMax,
+                                   defaultBranch,
+                                   newBranches: _*)
    } else {
      jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
    }
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
index 76246f9287a..a2405c3294d 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
@@ -152,8 +152,8 @@ abstract class BCodeSkelBuilder extends BCodeHelpers {
                interfaceNames.toArray)
 
    if (emitSource) {
-      cnode
-        .visitSource(cunit.source.toString, null /* SourceDebugExtension */ )
+      cnode.visitSource(cunit.source.toString,
+                        null /* SourceDebugExtension */ )
    }
 
    enclosingMethodAttribute(claszSymbol,
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
index 4205d57c6e5..d3c93475735 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala
@@ -265,8 +265,9 @@ object BackendReporting {
    def descriptor: String
 
    def calleeMethodSig =
-      BackendReporting
-        .methodSignature(calleeDeclarationClass, name, descriptor)
+      BackendReporting.methodSignature(calleeDeclarationClass,
+                                       name,
+                                       descriptor)
 
    override def toString = this match {
      case IllegalAccessInstruction(_, _, _, callsiteClass, instruction) =>
@@ -302,8 +303,10 @@ object BackendReporting {
                          callsiteClass,
                          callsiteName,
                          callsiteDesc) =>
-        s"""The callsite method ${BackendReporting
-             .methodSignature(callsiteClass, callsiteName, callsiteDesc)}
+        s"""The callsite method ${BackendReporting.methodSignature(
+             callsiteClass,
+             callsiteName,
+             callsiteDesc)}
           |does not have the same strictfp mode as the callee $calleeMethodSig.
         """.stripMargin
 
@@ -313,8 +316,10 @@ object BackendReporting {
                          callsiteClass,
                          callsiteName,
                          callsiteDesc) =>
-        s"""The size of the callsite method ${BackendReporting
-             .methodSignature(callsiteClass, callsiteName, callsiteDesc)}
+        s"""The size of the callsite method ${BackendReporting.methodSignature(
+             callsiteClass,
+             callsiteName,
+             callsiteDesc)}
           |would exceed the JVM method size limit after inlining $calleeMethodSig.
         """.stripMargin
    }
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
index d66c1774cba..7f53565a77d 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -414,8 +414,10 @@ abstract class GenBCode extends BCodeSyncAndTry {
          val outFile =
            if (outFolder == null) null
            else getFileForClassfile(outFolder, jclassName, ".class")
-          bytecodeWriter
-            .writeClass(jclassName, jclassName, jclassBytes, outFile)
+          bytecodeWriter.writeClass(jclassName,
+                                    jclassName,
+                                    jclassBytes,
+                                    outFile)
        } catch {
          case e: FileConflictException =>
            error(s"error writing $jclassName: ${e.getMessage}")
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
index 4577ca1dfed..bebe34c6d2c 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala
@@ -103,8 +103,7 @@ class AliasingFrame[V <: Value](nLocals: Int, nStack: Int)
      def peekStack(n: Int): V = this.peekStack(n)
 
    val prodCons =
-      InstructionStackEffect
-        .forAsmAnalysis(insn, this) // needs to be called before super.execute, see its doc
+      InstructionStackEffect.forAsmAnalysis(insn, this) // needs to be called before super.execute, see its doc
    val consumed = InstructionStackEffect.cons(prodCons)
    val produced = InstructionStackEffect.prod(prodCons)
 
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala
index bea9c2138b4..cb8cfaac05d 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala
@@ -710,8 +710,9 @@ class BoxUnbox[BT <: BTypes](val btypes: BT) {
      mi: MethodInsnNode,
      prodCons: ProdConsAnalyzer): Option[AbstractInsnNode] = {
    val numArgs = Type.getArgumentTypes(mi.desc).length
-    val receiverProds = prodCons
-      .producersForValueAt(mi, prodCons.frameAt(mi).stackTop - numArgs)
+    val receiverProds = prodCons.producersForValueAt(
+      mi,
+      prodCons.frameAt(mi).stackTop - numArgs)
    if (receiverProds.size == 1) {
      val prod = receiverProds.head
      if (isPredefLoad(prod) &&
@@ -935,8 +936,9 @@ class BoxUnbox[BT <: BTypes](val btypes: BT) {
      if (isSpecializedTupleClass(expectedTupleClass)) {
        val typeOK = tupleClass == expectedTupleClass ||
-          tupleClass == expectedTupleClass
-            .substring(0, expectedTupleClass.indexOf('$'))
+          tupleClass == expectedTupleClass.substring(
+            0,
+            expectedTupleClass.indexOf('$'))
        if (typeOK) {
          if (isSpecializedTupleGetter(mi))
            return Some(StaticGetterOrInstanceRead(mi))
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
index f8a991b43c9..f4433c6148f 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala
@@ -270,8 +270,9 @@ class CallGraph[BT <: BTypes](val btypes: BT) {
      }
      sams flatMap {
        case (index, _) =>
-          val prods = prodConsI
-            .initialProducersForValueAt(consumerInsn, firstConsumedSlot + index)
+          val prods =
+            prodConsI.initialProducersForValueAt(consumerInsn,
+                                                 firstConsumedSlot + index)
          if (prods.size != 1) None
          else {
            val argInfo = prods.head match {
diff --git a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
index 9f337d6fec3..151cb34771f 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala
@@ -47,8 +47,8 @@ class Inliner[BT <: BTypes](val btypes: BT) {
          val msg =
            s"${BackendReporting.methodSignature(callee.calleeDeclarationClass.internalName, callee.callee)}$annotWarn could not be inlined:\n$warning"
-          backendReporting
-            .inlinerWarning(request.callsite.callsitePosition, msg)
+          backendReporting.inlinerWarning(request.callsite.callsitePosition,
+                                          msg)
        }
      }
    }
@@ -184,8 +184,8 @@ class Inliner[BT <: BTypes](val btypes: BT) {
      .insert(callsite.callsiteInstruction, newCallsiteInstruction)
    callsite.callsiteMethod.instructions.remove(callsite.callsiteInstruction)
 
-    callGraph
-      .removeCallsite(callsite.callsiteInstruction, callsite.callsiteMethod)
+    callGraph.removeCallsite(callsite.callsiteInstruction,
+                             callsite.callsiteMethod)
    val staticCallSamParamTypes = {
      if (selfParamType.info.get.inlineInfo.sam.isEmpty) samParamTypes - 0
      else samParamTypes.updated(0, selfParamType)
diff --git a/repos/scala/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/repos/scala/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
index 2479e7b25a6..051a34c738b 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/classpath/FileUtils.scala
@@ -61,8 +61,7 @@ object FileUtils {
    fileName.length > 6 &&
      fileName.substring(fileName.length - 6) == ".scala"
 
  def stripClassExtension(fileName: String): String =
-    fileName
-      .substring(0, fileName.length - 6) // equivalent of fileName.length - ".class".length
+    fileName.substring(0, fileName.length - 6) // equivalent of fileName.length - ".class".length
 
  def stripJavaExtension(fileName: String): String =
    fileName.substring(0, fileName.length - 5)
diff --git a/repos/scala/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/repos/scala/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
index 9c96cfccd33..641c9c35540 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala
@@ -133,8 +133,8 @@ object ZipAndJarFlatClassPathFactory extends ZipAndJarFileLookupFactory {
    }
 
    val subpackages = getSubpackages(file)
-    packages
-      .put(FlatClassPath.RootPackage, PackageFileInfo(file, subpackages))
+    packages.put(FlatClassPath.RootPackage,
+                 PackageFileInfo(file, subpackages))
    traverse(FlatClassPath.RootPackage,
             subpackages,
             collection.mutable.Queue())
diff --git a/repos/scala/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/repos/scala/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 03a9bdf8696..5dd6974a9df 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -114,8 +114,7 @@ trait Plugins { global: Global =>
 
  /** A description of all the plugins that are loaded */
  def pluginDescriptions: String =
    roughPluginsList map (x =>
-      "%s - %s"
-        .format(x.name, x.description)) mkString "\n"
+      "%s - %s".format(x.name, x.description)) mkString "\n"
 
  /**
   * Extract all phases supplied by plugins and add them to the phasesSet.
diff --git a/repos/scala/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/repos/scala/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 56e8e01c714..b63e09cb159 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -893,8 +893,11 @@ abstract class ClassfileParser {
        if (!isScalaAnnot) {
          devWarning(
            s"symbol ${sym.fullName} has pickled signature in attribute")
-          unpickler
-            .unpickle(in.buf, in.bp, clazz, staticModule, in.file.name)
+          unpickler.unpickle(in.buf,
+                             in.bp,
+                             clazz,
+                             staticModule,
+                             in.file.name)
        }
        in.skip(attrLen)
      case tpnme.ScalaATTR =>
@@ -916,8 +919,11 @@ abstract class ClassfileParser {
              ._2
              .asInstanceOf[ScalaSigBytes]
              .bytes
-            unpickler
-              .unpickle(bytes, 0, clazz, staticModule, in.file.name)
+            unpickler.unpickle(bytes,
+                               0,
+                               clazz,
+                               staticModule,
+                               in.file.name)
          case None =>
            throw new RuntimeException(
              "Scala class file does not contain Scala annotation")
@@ -1123,11 +1129,9 @@ abstract class ClassfileParser {
        (newStub(name.toTypeName), newStub(name.toTermName))
      } else {
        val cls =
-          owner
-            .newClass(name.toTypeName, NoPosition, sflags) setInfo completer
+          owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
        val mod =
-          owner
-            .newModule(name.toTermName, NoPosition, sflags) setInfo completer
+          owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
        mod.moduleClass setInfo loaders.moduleClassLoader
        List(cls, mod.moduleClass) foreach (_.associatedFile = file)
        (cls, mod)
diff --git a/repos/scala/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/repos/scala/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index c780e2b4945..812bad31c18 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -363,8 +363,11 @@ abstract class Pickler extends SubComponent {
    private def writeName(name: Name) {
      ensureCapacity(name.length * 3)
      val utfBytes = Codec toUTF8 name.toString
-      scala.compat.Platform
-        .arraycopy(utfBytes, 0, bytes, writeIndex, utfBytes.length)
+      scala.compat.Platform.arraycopy(utfBytes,
+                                      0,
+                                      bytes,
+                                      writeIndex,
+                                      utfBytes.length)
      writeIndex += utfBytes.length
    }
 
diff --git a/repos/scala/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/repos/scala/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index a4d29e0be35..f98a801582b 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -287,8 +287,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL {
 
          // exception catching machinery
          val invokeExc =
-            currentOwner
-              .newValue(mkTerm(""), ad.pos) setInfo InvocationTargetExceptionClass.tpe
+            currentOwner.newValue(mkTerm(""), ad.pos) setInfo InvocationTargetExceptionClass.tpe
          def catchVar =
            Bind(invokeExc,
                 Typed(Ident(nme.WILDCARD),
diff --git a/repos/scala/src/compiler/scala/tools/nsc/transform/Constructors.scala b/repos/scala/src/compiler/scala/tools/nsc/transform/Constructors.scala
index ccf04ac0de0..10702daecda 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -93,8 +93,11 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL {
        } else {
          checkUninitializedReads(cd)
          val tplTransformer = new TemplateTransformer(unit, impl0)
-          treeCopy
-            .ClassDef(cd, mods0, name0, tparams0, tplTransformer.transformed)
+          treeCopy.ClassDef(cd,
+                            mods0,
+                            name0,
+                            tparams0,
+                            tplTransformer.transformed)
        }
      case _ =>
        super.transform(tree)
diff --git a/repos/scala/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/repos/scala/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
index dba6b39869e..f789aeff861 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -269,8 +269,8 @@ abstract class Delambdafy
        gen.mkAttributedThis(oldClass) // sort of a lie, EmptyTree. would be more honest, but the backend chokes on that.
 
    val body = localTyper typed Apply(Select(qual, target), oldParams)
-    body
-      .substituteSymbols(fun.vparams map (_.symbol), params map (_.symbol))
+    body.substituteSymbols(fun.vparams map (_.symbol),
+                           params map (_.symbol))
    body changeOwner (fun.symbol -> methSym)
 
    val methDef = DefDef(methSym, List(params), body)
@@ -465,8 +465,9 @@
    if (functionalInterface.exists) {
      // Create a symbol representing a fictional lambda factory method that accepts the captured
      // arguments and returns a Function.
- val msym = currentOwner - .newMethod(nme.ANON_FUN_NAME, originalFunction.pos, ARTIFACT) + val msym = currentOwner.newMethod(nme.ANON_FUN_NAME, + originalFunction.pos, + ARTIFACT) val argTypes: List[Type] = allCaptureArgs.map(_.tpe) val params = msym.newSyntheticValueParams(argTypes) msym.setInfo(MethodType(params, functionType)) @@ -512,8 +513,9 @@ abstract class Delambdafy def createBridgeMethod(newClass: Symbol, originalFunction: Function, applyMethod: DefDef): Option[DefDef] = { - val bridgeMethSym = newClass - .newMethod(nme.apply, applyMethod.pos, FINAL | SYNTHETIC | BRIDGE) + val bridgeMethSym = newClass.newMethod(nme.apply, + applyMethod.pos, + FINAL | SYNTHETIC | BRIDGE) val originalParams = applyMethod.vparamss(0) val bridgeParams = originalParams map { originalParam => diff --git a/repos/scala/src/compiler/scala/tools/nsc/transform/Erasure.scala b/repos/scala/src/compiler/scala/tools/nsc/transform/Erasure.scala index b1636120795..7e023acb8aa 100644 --- a/repos/scala/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/repos/scala/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -659,8 +659,9 @@ abstract class Erasure } else treeCopy.Apply( tree, - treeCopy - .TypeApply(ta, treeCopy.Select(sel, qual1, name), List(targ)), + treeCopy.TypeApply(ta, + treeCopy.Select(sel, qual1, name), + List(targ)), List()) case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List()) @@ -777,8 +778,10 @@ abstract class Erasure case Match(selector, cases) => treeCopy.Match(tree1, selector, cases map adaptCase) case Try(block, catches, finalizer) => - treeCopy - .Try(tree1, adaptBranch(block), catches map adaptCase, finalizer) + treeCopy.Try(tree1, + adaptBranch(block), + catches map adaptCase, + finalizer) case Ident(_) | Select(_, _) => if (tree1.symbol.isOverloaded) { val first = tree1.symbol.alternatives.head diff --git a/repos/scala/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/repos/scala/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 6d92e429e01..82b0e78eca1 100644 --- a/repos/scala/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/repos/scala/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -176,8 +176,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*) val thisParam = - extensionMeth - .newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType + extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult)) val selfParamType = diff --git a/repos/scala/src/compiler/scala/tools/nsc/transform/Mixin.scala b/repos/scala/src/compiler/scala/tools/nsc/transform/Mixin.scala index 066f3b013e1..7cbbc481703 100644 --- a/repos/scala/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/repos/scala/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -190,8 +190,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | (if (field.isMutable) 0 else STABLE) // TODO preserve pre-erasure info? 
- clazz - .newMethod(field.getterName, field.pos, newFlags) setInfo MethodType( + clazz.newMethod(field.getterName, field.pos, newFlags) setInfo MethodType( Nil, field.info) } @@ -1105,8 +1104,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { // mark fields which can be nulled afterward lazyValNullables = nullableFields(templ) withDefaultValue Set() // add all new definitions to current class or interface - treeCopy - .Template(tree, parents1, self, addNewDefs(currentOwner, body)) + treeCopy.Template(tree, + parents1, + self, + addNewDefs(currentOwner, body)) case Select(qual, name) if sym.owner.isTrait && !sym.isMethod => // refer to fields in some trait an abstract getter in the interface. diff --git a/repos/scala/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/repos/scala/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 3cc615d3ce2..4ea1c225d00 100644 --- a/repos/scala/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/repos/scala/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -41,7 +41,6 @@ abstract class OverridingPairs extends SymbolPairs { && !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member. && - relatively - .matches(lo, high)) // TODO we don't call exclude(high), should we? + relatively.matches(lo, high)) // TODO we don't call exclude(high), should we? } } diff --git a/repos/scala/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/repos/scala/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index c20284101d9..32db9005c47 100644 --- a/repos/scala/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/repos/scala/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -409,8 +409,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { specializedOn(sym) map (s => specializesClass(s).tpe) sorted if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass)) - reporter - .warning(sym.pos, sym + " is always a subtype of " + AnyRefTpe + ".") + reporter.warning(sym.pos, + sym + " is always a subtype of " + AnyRefTpe + ".") types } @@ -607,8 +607,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // debuglog("Specializing " + clazz + ", but found " + bytecodeClazz + " already there") bytecodeClazz.info - val sClass = clazz.owner - .newClass(clazzName, clazz.pos, (clazz.flags | SPECIALIZED) & ~CASE) + val sClass = clazz.owner.newClass(clazzName, + clazz.pos, + (clazz.flags | SPECIALIZED) & ~CASE) sClass.setAnnotations(clazz.annotations) // SI-8574 important that the subclass picks up @SerialVersionUID, @strictfp, etc. def cloneInSpecializedClass(member: Symbol, @@ -669,8 +670,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val extraSpecializedMixins = specializedParents( clazz.info.parents map applyContext) if (extraSpecializedMixins.nonEmpty) - debuglog("extra specialized mixins for %s: %s" - .format(clazz.name.decode, extraSpecializedMixins.mkString(", "))) + debuglog( + "extra specialized mixins for %s: %s".format( + clazz.name.decode, + extraSpecializedMixins.mkString(", "))) // If the class being specialized has a self-type, the self type may // require specialization. 
    // First exclude classes whose self types have
    // the same type constructor as the class itself, since they will
diff --git a/repos/scala/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/repos/scala/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index e14df684279..ea29eb829fd 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -29,12 +29,10 @@ trait MatchCodeGen extends Interface {
    // assert(owner ne null); assert(owner ne NoSymbol)
    def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
-      NoSymbol
-        .newTermSymbol(freshName(prefix), pos, newFlags = SYNTHETIC) setInfo tp
+      NoSymbol.newTermSymbol(freshName(prefix), pos, newFlags = SYNTHETIC) setInfo tp
    def newSynthCaseLabel(name: String) =
-      NoSymbol
-        .newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS
+      NoSymbol.newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS

  // codegen relevant to the structure of the translation (how extractors are combined)
  trait AbsCodegen {
@@ -256,8 +254,8 @@ trait MatchCodeGen extends Interface {
    def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(
        cases: List[Casegen => Tree],
        matchFailGen: Option[Tree => Tree]): Tree =
-      optimizedCodegen
-        .matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
+      optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases,
+                                                        matchFailGen)

    // only used to wrap the RHS of a body
    // res: T
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 5789702ea51..b7e886eb7e9 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -948,8 +947,7 @@ trait ContextErrors { self: Analyzer =>
          else msg
        macroLogLite("macro expansion has failed: %s".format(msgForLog))
        if (msg != null)
-          context
-            .error(if (pos.isDefined) pos else expandee.pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
+          context.error(if (pos.isDefined) pos else expandee.pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
        setError(expandee)
        throw MacroExpansionException
      }
@@ -1455,8 +1454,8 @@ trait ContextErrors { self: Analyzer =>
      val where =
        if (currentSym.isTopLevel != prevSym.isTopLevel) {
          val inOrOut = if (prevSym.isTopLevel) "outside of" else "in"
-          " %s package object %s"
-            .format(inOrOut, "" + prevSym.effectiveOwner.name)
+          " %s package object %s".format(inOrOut,
+                                         "" + prevSym.effectiveOwner.name)
        } else ""
      issueSymbolTypeError(
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 34692d5c5ca..b42131cc9cb 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -354,8 +354,8 @@ trait Implicits { self: Analyzer =>
  object HasMember {
    private val hasMemberCache = perRunCaches.newMap[Name, Type]()
    def apply(name: Name): Type =
-      hasMemberCache
-        .getOrElseUpdate(name, memberWildcardType(name, WildcardType))
+      hasMemberCache.getOrElseUpdate(name,
+                                     memberWildcardType(name, WildcardType))
  }

  /** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
@@ -1419,9 +1419,12 @@ trait Implicits { self: Analyzer =>
            List(tp),
            if (prefix != EmptyTree) List(prefix) else List()))
      if (settings.XlogImplicits)
-        reporter.echo(pos,
-                      "materializing requested %s.%s[%s] using %s"
-                        .format(pre, tagClass.name, tp, materializer))
+        reporter.echo(
+          pos,
+          "materializing requested %s.%s[%s] using %s".format(pre,
+                                                              tagClass.name,
+                                                              tp,
+                                                              materializer))
      if (context.macrosEnabled) success(materializer)
      // don't call `failure` here. if macros are disabled, we just fail silently
      // otherwise -Xlog-implicits will spam the long with zillions of "macros are disabled"
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index ba4ab9a9d12..78c0e27cfc3 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -63,8 +63,7 @@ trait MethodSynthesis { self: Analyzer =>
                       f: Symbol => Tree,
                       name: Name): Tree = {
    val m =
-      original
-        .cloneSymbol(clazz, newMethodFlags(original), name) setPos clazz.pos.focus
+      original.cloneSymbol(clazz, newMethodFlags(original), name) setPos clazz.pos.focus
    finishMethod(clazz.info.decls enter m, f)
  }
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index d88a1e7e5ce..3193a66ab56 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -154,8 +154,9 @@ trait Namers extends MethodSynthesis { self: Analyzer =>
    def setPrivateWithin[T <: Symbol](tree: Tree, sym: T, mods: Modifiers): T =
      if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym
      else
-        sym setPrivateWithin typer
-          .qualifyingClass(tree, mods.privateWithin, packageOK = true)
+        sym setPrivateWithin typer.qualifyingClass(tree,
+                                                   mods.privateWithin,
+                                                   packageOK = true)

    def setPrivateWithin(tree: MemberDef, sym: Symbol): Symbol =
      setPrivateWithin(tree, sym, tree.mods)
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
index 225bad2678c..b59fbfac929 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -327,8 +327,11 @@ trait PatternTypers { self: Analyzer =>
        unapplyType.skolemizeExistential(context.owner, tree))
      val unapplyContext = context.makeNewScope(context.tree, context.owner)
      freeVars foreach unapplyContext.scope.enter
-      val pattp = newTyper(unapplyContext).infer
-        .inferTypedPattern(tree, unappFormal, pt, canRemedy)
+      val pattp = newTyper(unapplyContext).infer.inferTypedPattern(
+        tree,
+        unappFormal,
+        pt,
+        canRemedy)
      // turn any unresolved type variables in freevars into existential skolems
      val skolems = freeVars map
@@ -337,8 +340,7 @@
      }
      val unapplyArg =
-        (context.owner
-          .newValue(nme.SELECTOR_DUMMY, fun.pos, Flags.SYNTHETIC) setInfo
+        (context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, Flags.SYNTHETIC) setInfo
          (if (isApplicableSafe(Nil, unapplyType, pt :: Nil, WildcardType)) pt
           else freshUnapplyArgType()))
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 32bc45fbb59..2b0302b9bfe 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1600,15 +1600,16 @@ abstract class RefChecks
          "may be unable to provide a concrete implementation of"
        else "may be unable to override"

-      reporter.warning(
-        memberSym.pos,
-        "%s%s references %s %s.".format(
-          memberSym.fullLocationString,
-          comparison,
-          accessFlagsToString(otherSym),
-          otherSym
-        ) + "\nClasses which cannot access %s %s %s."
-          .format(otherSym.decodedName, cannot, memberSym.decodedName))
+      reporter.warning(memberSym.pos,
+                       "%s%s references %s %s.".format(
+                         memberSym.fullLocationString,
+                         comparison,
+                         accessFlagsToString(otherSym),
+                         otherSym
+                       ) + "\nClasses which cannot access %s %s %s.".format(
+                         otherSym.decodedName,
+                         cannot,
+                         memberSym.decodedName))
    }

    /** Warn about situations where a method signature will include a type which
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 66f6bd19b79..52a9db12d93 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -410,8 +410,9 @@ abstract class SuperAccessors
        case Apply(fn, args) =>
          assert(fn.tpe != null, tree)
-          treeCopy
-            .Apply(tree, transform(fn), transformArgs(fn.tpe.params, args))
+          treeCopy.Apply(tree,
+                         transform(fn),
+                         transformArgs(fn.tpe.params, args))

        case Function(vparams, body) =>
          withInvalidOwner {
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 61ce86f1791..ba791f869ba 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -435,8 +435,9 @@ trait SyntheticMethods extends ast.TreeDSL { self: Analyzer =>
        // TODO: shouldn't the next line be: `original resetFlag CASEACCESSOR`?
        ddef.symbol resetFlag CASEACCESSOR
        lb += logResult("case accessor new")(newAcc)
-        val renamedInClassMap = renamedCaseAccessors
-          .getOrElseUpdate(clazz, mutable.Map() withDefault (x => x))
+        val renamedInClassMap = renamedCaseAccessors.getOrElseUpdate(
+          clazz,
+          mutable.Map() withDefault (x => x))
        renamedInClassMap(original.name.toTermName) = newAcc.symbol.name.toTermName
      }
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 48aa44844e9..c133d04eb9b 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -424,8 +424,9 @@ abstract class TreeCheckers extends Analyzer {
      def mk[T](what: String,
                x: T,
                str: T => String = (x: T) => "" + x): ((Any, String)) =
-        x -> s"%10s %-20s %s"
-          .format(what, classString(x), truncate(str(x), 80).trim)
+        x -> s"%10s %-20s %s".format(what,
+                                     classString(x),
+                                     truncate(str(x), 80).trim)

      def encls =
        enclosingMemberDefs.filterNot(_.symbol == treeSym).zipWithIndex map {
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 37fbbd99431..37a96decdd5 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -155,8 +155,7 @@ trait TypeDiagnostics { self: Analyzer =>
          DEFERRED.toLong | MUTABLE
        else DEFERRED
-      getter.owner
-        .newValue(getter.name.toTermName, getter.pos, flags) setInfo getter.tpe.resultType
+      getter.owner.newValue(getter.name.toTermName, getter.pos, flags) setInfo getter.tpe.resultType
    }

    def treeSymTypeMsg(tree: Tree): String = {
@@ -249,8 +248,10 @@ trait TypeDiagnostics { self: Analyzer =>
      )
      val explainDef = {
        val prepend = if (isJava) "Java-defined " else ""
-        "%s%s is %s in %s."
-          .format(prepend, reqsym, param.variance, param)
+        "%s%s is %s in %s.".format(prepend,
+                                   reqsym,
+                                   param.variance,
+                                   param)
      }
      // Don't suggest they change the class declaration if it's somewhere
      // under scala.* or defined in a java class, because attempting either
@@ -261,8 +262,10 @@ trait TypeDiagnostics { self: Analyzer =>
           "investigate a wildcard type such as `_ %s %s`. (SLS 3.2.10)"
             .format(op, reqArg)
         else
-           "define %s as %s%s instead. (SLS 4.5)"
-             .format(param.name, suggest, param.name))
+           "define %s as %s%s instead. (SLS 4.5)".format(
+             param.name,
+             suggest,
+             param.name))

      Some("Note: " + explainFound + explainDef + suggestChange)
    }
diff --git a/repos/scala/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/repos/scala/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 1296dbfba44..fd3f6cdeefb 100644
--- a/repos/scala/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/repos/scala/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1358,8 +1358,7 @@ trait Typers
    if (isAdaptableWithView(qual)) {
      qual.tpe.dealiasWiden match {
        case et: ExistentialType =>
-          qual setType et
-            .skolemizeExistential(context.owner, qual) // open the existential
+          qual setType et.skolemizeExistential(context.owner, qual) // open the existential
        case _ =>
      }
      inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match {
@@ -2003,8 +2002,8 @@ trait Typers
      if ((clazz isNonBottomSubClass ClassfileAnnotationClass) &&
          (clazz != ClassfileAnnotationClass)) {
        if (!clazz.owner.isPackageClass)
-          context
-            .error(clazz.pos, "inner classes cannot be classfile annotations")
+          context.error(clazz.pos,
+                        "inner classes cannot be classfile annotations")
        // Ignore @SerialVersionUID, because it is special-cased and handled completely differently.
        // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement
        // of constant argument values "for free". Related to SI-7041.
@@ -2143,8 +2142,8 @@ trait Typers
        ConstrArgsInParentOfTraitError(parents1.head, clazz)

      if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel)
-        context
-          .error(clazz.pos, "inner classes cannot be classfile annotations")
+        context.error(clazz.pos,
+                      "inner classes cannot be classfile annotations")

      if (!phase.erasedTypes && !clazz.info.resultType.isError)
        // @S: prevent crash for duplicated type members
@@ -2260,8 +2259,7 @@ trait Typers
          } else tpt1.tpe
          transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
        }
-      treeCopy
-        .ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
+      treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
    }

    /** Enter all aliases of local parameter accessors.
@@ -2580,8 +2578,7 @@ trait Typers
      if (tdef.symbol.isDeferred && tdef.symbol.info.isHigherKinded)
        checkFeature(tdef.pos, HigherKindsFeature)

-      treeCopy
-        .TypeDef(tdef, typedMods, tdef.name, tparams1, rhs1) setType NoType
+      treeCopy.TypeDef(tdef, typedMods, tdef.name, tparams1, rhs1) setType NoType
    }

    private def enterLabelDef(stat: Tree) {
@@ -2612,8 +2609,10 @@
        treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
      } else {
        context.scope.unlink(ldef.symbol)
-        val sym2 = namer.enterInScope(context.owner
-          .newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
+        val sym2 = namer.enterInScope(
+          context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(
+            List(),
+            restpe))
        val LabelDef(_, _, rhs1) = resetAttrs(ldef)
        val rhs2 = typed(brutallyResetAttrs(rhs1), restpe)
        ldef.params foreach (param => param setType param.symbol.tpe)
@@ -2626,8 +2625,9 @@
      val syntheticPrivates = new ListBuffer[Symbol]
      try {
        namer.enterSyms(block0.stats)
-        val block = treeCopy
-          .Block(block0, pluginsEnterStats(this, block0.stats), block0.expr)
+        val block = treeCopy.Block(block0,
+                                   pluginsEnterStats(this, block0.stats),
+                                   block0.expr)
        for (stat <- block.stats) enterLabelDef(stat)

        if (phaseId(currentPeriod) <= currentRun.typerPhase.id) {
@@ -2887,8 +2887,7 @@
        ))

      def mkParam(methodSym: Symbol, tp: Type = argTp) =
-        methodSym
-          .newValueParameter(paramName, paramPos.focus, SYNTHETIC) setInfo tp
+        methodSym.newValueParameter(paramName, paramPos.focus, SYNTHETIC) setInfo tp

      def mkDefaultCase(body: Tree) =
        atPos(tree.pos.makeTransparent) {
@@ -3387,8 +3386,11 @@ trait Typers
          // Use synthesizeSAMFunction to expand `(p1: T1, ..., pN: TN) => body`
          // to an instance of the corresponding anonymous subclass of `pt`.
          case _ if samViable =>
-            newTyper(context.outer)
-              .synthesizeSAMFunction(sam, fun, respt, pt, mode)
+            newTyper(context.outer).synthesizeSAMFunction(sam,
+                                                          fun,
+                                                          respt,
+                                                          pt,
+                                                          mode)

          // regular Function
          case _ =>
@@ -4581,8 +4583,9 @@ trait Typers
            gen.mkTuple(List(CODE.LIT(""), arg))
        }

-      val t = treeCopy
-        .Apply(orig, unmarkDynamicRewrite(fun), args map argToBinding)
+      val t = treeCopy.Apply(orig,
+                             unmarkDynamicRewrite(fun),
+                             args map argToBinding)
      wrapErrors(t, _.typed(t, mode, pt))
    }
@@ -4786,8 +4789,7 @@ trait Typers
      val body1 = typed(body, mode, pt)
      val impliedType =
-        patmat
-          .binderTypeImpliedByPattern(body1, pt, sym) // SI-1503, SI-5204
+        patmat.binderTypeImpliedByPattern(body1, pt, sym) // SI-1503, SI-5204
      val symTp =
        if (treeInfo.isSequenceValued(body)) seqType(impliedType)
        else impliedType
@@ -5764,8 +5766,9 @@ trait Typers
      if ((sym ne null) && (sym ne NoSymbol)) sym.initialize

    def typedPackageDef(pdef0: PackageDef) = {
-      val pdef = treeCopy
-        .PackageDef(pdef0, pdef0.pid, pluginsEnterStats(this, pdef0.stats))
+      val pdef = treeCopy.PackageDef(pdef0,
+                                     pdef0.pid,
+                                     pluginsEnterStats(this, pdef0.stats))
      val pid1 = typedQualifier(pdef.pid).asInstanceOf[RefTree]
      assert(sym.moduleClass ne NoSymbol, sym)
      val stats1 =
@@ -5796,8 +5799,8 @@ trait Typers
      if (!context.starPatterns && !isPastTyper)
        StarPatternWithVarargParametersError(tree)

-      treeCopy
-        .Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
+      treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(
+        pt)
    }
    def issueTryWarnings(tree: Try): Try = {
      def checkForCatchAll(cdef: CaseDef) {
@@ -5983,8 +5986,8 @@ trait Typers
      def maybeWarn(s: String): Unit = {
        def warn(message: String) =
-          context
-            .warning(lit.pos, s"possible missing interpolator: $message")
+          context.warning(lit.pos,
+                          s"possible missing interpolator: $message")
        def suspiciousSym(name: TermName) =
          context.lookupSymbol(name, _ => true).symbol
        def suspiciousExpr = InterpolatorCodeRegex findFirstIn s
diff --git a/repos/scala/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/repos/scala/src/compiler/scala/tools/reflect/FormatInterpolator.scala
index 02fbba21054..4bf75630e65 100644
--- a/repos/scala/src/compiler/scala/tools/reflect/FormatInterpolator.scala
+++ b/repos/scala/src/compiler/scala/tools/reflect/FormatInterpolator.scala
@@ -311,8 +311,7 @@ abstract class FormatInterpolator {
      */
    def pickAcceptable(arg: Tree, variants: Type*): Option[Type] =
      variants find (arg.tpe <:< _) orElse
-        (variants find (c
-          .inferImplicitView(arg, arg.tpe, _) != EmptyTree)) orElse Some(
+        (variants find (c.inferImplicitView(arg, arg.tpe, _) != EmptyTree)) orElse Some(
        variants(0))
  }
  object Conversion {
diff --git a/repos/scala/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/repos/scala/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
index 981bd753343..f2aa3e21c87 100644
--- a/repos/scala/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/repos/scala/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -448,8 +448,10 @@ trait CompilerControl { self: Global =>
                                    response: Response[Tree])
      extends WorkItem {
    def apply() =
-      self
-        .getParsedEntered(source, keepLoaded, response, this.onCompilerThread)
+      self.getParsedEntered(source,
+                            keepLoaded,
+                            response,
+                            this.onCompilerThread)
    override def toString =
      "getParsedEntered " + source + ", keepLoaded = " + keepLoaded
diff --git a/repos/scala/src/interactive/scala/tools/nsc/interactive/Global.scala b/repos/scala/src/interactive/scala/tools/nsc/interactive/Global.scala
index 8d31be6a351..37054026626 100644
--- a/repos/scala/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/repos/scala/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -1338,8 +1338,11 @@ with ContextTrees with RichCompilationUnits with Picklers {
        .newName(
          new String(pos.source.content, nameStart, pos.start - nameStart))
        .encodedName
-      CompletionResult
-        .TypeMembers(positionDelta, qual, tree, allTypeMembers, subName)
+      CompletionResult.TypeMembers(positionDelta,
+                                   qual,
+                                   tree,
+                                   allTypeMembers,
+                                   subName)
    }
    focus1 match {
      case imp @ Import(i @ Ident(name), head :: Nil)
diff --git a/repos/scala/src/interactive/scala/tools/nsc/interactive/REPL.scala b/repos/scala/src/interactive/scala/tools/nsc/interactive/REPL.scala
index 0c555c702a4..05a819565d9 100644
--- a/repos/scala/src/interactive/scala/tools/nsc/interactive/REPL.scala
+++ b/repos/scala/src/interactive/scala/tools/nsc/interactive/REPL.scala
@@ -117,8 +117,9 @@ object REPL {
        comp.askReload(List(toSourceFile(file)), reloadResult)
        Thread.sleep(millis.toLong)
        println("ask type now")
-        comp
-          .askLoadedTyped(toSourceFile(file), keepLoaded = true, typedResult)
+        comp.askLoadedTyped(toSourceFile(file),
+                            keepLoaded = true,
+                            typedResult)
        typedResult.get
      case List("typeat", file, off1, off2) =>
        doTypeAt(makePos(file, off1, off2))
diff --git a/repos/scala/src/library/scala/collection/concurrent/TrieMap.scala b/repos/scala/src/library/scala/collection/concurrent/TrieMap.scala
index fa40e9b33e2..efbb77adbfc 100644
--- a/repos/scala/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/repos/scala/src/library/scala/collection/concurrent/TrieMap.scala
@@ -756,8 +756,9 @@ final class TrieMap[K, V] private (
  def this(hashf: Hashing[K], ef: Equiv[K]) =
    this(
      INode.newRootNode,
-      AtomicReferenceFieldUpdater
-        .newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root"),
+      AtomicReferenceFieldUpdater.newUpdater(classOf[TrieMap[K, V]],
+                                             classOf[AnyRef],
+                                             "root"),
      hashf,
      ef
    )
@@ -781,8 +782,10 @@ final class TrieMap[K, V] private (
  private def readObject(in: java.io.ObjectInputStream) {
    root = INode.newRootNode
-    rootupdater = AtomicReferenceFieldUpdater
-      .newUpdater(classOf[TrieMap[K, V]], classOf[AnyRef], "root")
+    rootupdater = AtomicReferenceFieldUpdater.newUpdater(
+      classOf[TrieMap[K, V]],
+      classOf[AnyRef],
+      "root")

    hashingobj = in.readObject().asInstanceOf[Hashing[K]]
    equalityobj = in.readObject().asInstanceOf[Equiv[K]]
@@ -1071,8 +1074,10 @@ final class TrieMap[K, V] private (
 }

 object TrieMap extends MutableMapFactory[TrieMap] {
-  val inodeupdater = AtomicReferenceFieldUpdater
-    .newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode")
+  val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(
+    classOf[INodeBase[_, _]],
+    classOf[MainNode[_, _]],
+    "mainnode")

  implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), TrieMap[K, V]] =
    new MapCanBuildFrom[K, V]
diff --git a/repos/scala/src/library/scala/collection/generic/GenTraversableFactory.scala b/repos/scala/src/library/scala/collection/generic/GenTraversableFactory.scala
index e147c048be3..0e09314aa4b 100644
--- a/repos/scala/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/repos/scala/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -225,8 +225,10 @@ abstract class GenTraversableFactory[
    if (step == zero) throw new IllegalArgumentException("zero step")
    val b = newBuilder[T]
-    b sizeHint immutable.NumericRange
-      .count(start, end, step, isInclusive = false)
+    b sizeHint immutable.NumericRange.count(start,
+                                            end,
+                                            step,
+                                            isInclusive = false)
    var i = start
    while (if (step < zero) end < i else i < end) {
      b += i
diff --git a/repos/scala/src/library/scala/collection/immutable/Range.scala b/repos/scala/src/library/scala/collection/immutable/Range.scala
index 7482b2aae4c..d09a1c23daf 100644
--- a/repos/scala/src/library/scala/collection/immutable/Range.scala
+++ b/repos/scala/src/library/scala/collection/immutable/Range.scala
@@ -141,8 +141,10 @@ class Range(val start: Int, val end: Int, val step: Int)
  override def length = if (numRangeElements < 0) fail() else numRangeElements

  private def description =
-    "%d %s %d by %s"
-      .format(start, if (isInclusive) "to" else "until", end, step)
+    "%d %s %d by %s".format(start,
+                            if (isInclusive) "to" else "until",
+                            end,
+                            step)
  private def fail() =
    throw new IllegalArgumentException(
      description + ": seqs cannot contain more than Int.MaxValue elements.")
diff --git a/repos/scala/src/library/scala/collection/parallel/ParSeqLike.scala b/repos/scala/src/library/scala/collection/parallel/ParSeqLike.scala
index a793842a10f..6bc8b45af56 100644
--- a/repos/scala/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/repos/scala/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -250,8 +250,9 @@ trait ParSeqLike[
    if (patch.isParSeq && bf(repr).isCombiner &&
        (size - realreplaced + patch.size) > MIN_FOR_COPY) {
      val that = patch.asParSeq
-      val pits = splitter
-        .psplitWithSignalling(from, replaced, length - from - realreplaced)
+      val pits = splitter.psplitWithSignalling(from,
+                                               replaced,
+                                               length - from - realreplaced)
      val cfactory = combinerFactory(() => bf(repr).asCombiner)
      val copystart = new Copy[U, That](cfactory, pits(0))
      val copymiddle = wrap {
diff --git a/repos/scala/src/library/scala/io/Source.scala b/repos/scala/src/library/scala/io/Source.scala
index b0e2d65de09..57075427adc 100644
--- a/repos/scala/src/library/scala/io/Source.scala
+++ b/repos/scala/src/library/scala/io/Source.scala
@@ -332,8 +332,12 @@ abstract class Source extends Iterator[Char] with Closeable {
    val line = Position line pos
    val col = Position column pos

-    out println "%s:%d:%d: %s%s%s^"
-      .format(descr, line, col, msg, lineNum(line), spaces(col - 1))
+    out println "%s:%d:%d: %s%s%s^".format(descr,
+                                           line,
+                                           col,
+                                           msg,
+                                           lineNum(line),
+                                           spaces(col - 1))
  }

  /**
diff --git a/repos/scala/src/library/scala/runtime/Tuple3Zipped.scala b/repos/scala/src/library/scala/runtime/Tuple3Zipped.scala
index 5f3568c9295..a3e8c27fc05 100644
--- a/repos/scala/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/repos/scala/src/library/scala/runtime/Tuple3Zipped.scala
@@ -121,8 +121,9 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](
  }

  override def toString: String =
-    "(%s, %s, %s).zipped"
-      .format(colls._1.toString, colls._2.toString, colls._3.toString)
+    "(%s, %s, %s).zipped".format(colls._1.toString,
+                                 colls._2.toString,
+                                 colls._3.toString)
 }

 object Tuple3Zipped {
diff --git a/repos/scala/src/partest-extras/scala/tools/partest/ReplTest.scala b/repos/scala/src/partest-extras/scala/tools/partest/ReplTest.scala
index 3f6e54a164c..10c85797389 100644
--- a/repos/scala/src/partest-extras/scala/tools/partest/ReplTest.scala
+++ b/repos/scala/src/partest-extras/scala/tools/partest/ReplTest.scala
@@ -103,8 +103,10 @@ abstract class SessionTest extends ReplTest {
    if (evaled.size != wanted.size)
      Console println s"Expected ${wanted.size} lines, got ${evaled.size}"
    if (evaled != wanted)
-      Console print nest.FileManager
-        .compareContents(wanted, evaled, "expected", "actual")
+      Console print nest.FileManager.compareContents(wanted,
+                                                     evaled,
+                                                     "expected",
+                                                     "actual")
  }
 }
 object SessionTest {
diff --git a/repos/scala/src/reflect/scala/reflect/internal/Definitions.scala b/repos/scala/src/reflect/scala/reflect/internal/Definitions.scala
index fb26e7e42ac..f84b6abe29f 100644
--- a/repos/scala/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/repos/scala/src/reflect/scala/reflect/internal/Definitions.scala
@@ -1456,8 +1456,9 @@ trait Definitions extends api.StandardDefinitions { self: SymbolTable =>
  }

  lazy val AnnotationDefaultAttr: ClassSymbol = {
-    val sym = RuntimePackageClass
-      .newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L)
+    val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR,
+                                                 NoPosition,
+                                                 0L)
    sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym)
    markAllCompleted(sym)
    RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match {
diff --git a/repos/scala/src/reflect/scala/reflect/internal/Importers.scala b/repos/scala/src/reflect/scala/reflect/internal/Importers.scala
index 13e69ba66b7..8152ec2865b 100644
--- a/repos/scala/src/reflect/scala/reflect/internal/Importers.scala
+++ b/repos/scala/src/reflect/scala/reflect/internal/Importers.scala
@@ -136,8 +136,10 @@ trait Importers { to: SymbolTable =>
            case theirloc: from.Tree => importTree(theirloc)
            case theirloc: from.Symbol => importSymbol(theirloc)
          }
-          myowner
-            .newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
+          myowner.newTypeSkolemSymbol(myname.toTypeName,
+                                      origin,
+                                      mypos,
+                                      myflags)
        case their: from.ModuleClassSymbol =>
          val my = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
          symMap.weakUpdate(their, my)
diff --git a/repos/scala/src/reflect/scala/reflect/internal/Trees.scala b/repos/scala/src/reflect/scala/reflect/internal/Trees.scala
index 423b614ab42..d073f4731b5 100644
--- a/repos/scala/src/reflect/scala/reflect/internal/Trees.scala
+++ b/repos/scala/src/reflect/scala/reflect/internal/Trees.scala
@@ -28,8 +28,12 @@ trait Trees extends api.Trees { self: SymbolTable =>
      if (enclosingTree eq null) " "
      else " P#%5s".format(enclosingTree.id)

-    "[L%4s%8s] #%-6s %-15s %-10s // %s"
-      .format(t.pos.line, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
+    "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.line,
+                                               parent,
+                                               t.id,
+                                               t.pos.show,
+                                               t.shortClass,
+                                               treeLine(t))
  }
  protected def treeSymStatus(t: Tree) = {
    val line =
@@ -1192,8 +1196,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
    }

    override def toString =
-      "Modifiers(%s, %s, %s)"
-        .format(flagString, annotations mkString ", ", positions)
+      "Modifiers(%s, %s, %s)".format(flagString,
+                                     annotations mkString ", ",
+                                     positions)
  }

  object Modifiers extends ModifiersExtractor
@@ -1549,13 +1554,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
                           transform(rhs))
        }
      case Block(stats, expr) =>
-        treeCopy
-          .Block(tree, transformStats(stats, currentOwner), transform(expr))
+        treeCopy.Block(tree,
+                       transformStats(stats, currentOwner),
+                       transform(expr))
      case If(cond, thenp, elsep) =>
        treeCopy.If(tree, transform(cond), transform(thenp), transform(elsep))
      case CaseDef(pat, guard, body) =>
-        treeCopy
-          .CaseDef(tree, transform(pat), transform(guard), transform(body))
+        treeCopy.CaseDef(tree,
+                         transform(pat),
+                         transform(guard),
+                         transform(body))
      case TypeApply(fun, args) =>
        treeCopy.TypeApply(tree, transform(fun), transformTrees(args))
      case AppliedTypeTree(tpt, args) =>
@@ -1620,8 +1628,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
                           transform(rhs))
        }
      case LabelDef(name, params, rhs) =>
-        treeCopy
-          .LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LambdaLifter.proxy'
+        treeCopy.LabelDef(tree, name, transformIdents(params), transform(rhs)) //bq: Martin, once, atOwner(...) works, also change `LambdaLifter.proxy'
      case PackageDef(pid, stats) =>
        treeCopy.PackageDef(
          tree,
@@ -1649,8 +1656,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
      case Star(elem) =>
        treeCopy.Star(tree, transform(elem))
      case UnApply(fun, args) =>
-        treeCopy
-          .UnApply(tree, transform(fun), transformTrees(args)) // bq: see test/.../unapplyContexts2.scala
+        treeCopy.UnApply(tree, transform(fun), transformTrees(args)) // bq: see test/.../unapplyContexts2.scala
      case ArrayValue(elemtpt, trees) =>
        treeCopy.ArrayValue(tree, transform(elemtpt), transformTrees(trees))
      case ApplyDynamic(qual, args) =>
diff --git a/repos/scala/src/reflect/scala/reflect/internal/Types.scala b/repos/scala/src/reflect/scala/reflect/internal/Types.scala
index 68ec9d133ab..21af9ef46c0 100644
--- a/repos/scala/src/reflect/scala/reflect/internal/Types.scala
+++ b/repos/scala/src/reflect/scala/reflect/internal/Types.scala
@@ -2957,8 +2957,7 @@ trait Types
    val quantifiedFresh = cloneSymbols(quantified)
    val tvars = quantifiedFresh map (tparam => TypeVar(tparam))
    val underlying1 =
-      underlying
-        .instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
+      underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
    op(underlying1) && {
      solve(tvars,
            quantifiedFresh,
diff --git a/repos/scala/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/repos/scala/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index d62f02cc8cb..47a1d956bed 100644
--- a/repos/scala/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/repos/scala/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -225,8 +225,8 @@ private[internal] trait TypeConstraints { self: SymbolTable =>
            case TypeRef(_, `tparam`, _) =>
              debuglog(
                s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
-              tvar addHiBound tparam2.tpeHK
-                .instantiateTypeParams(tparams, tvars)
+              tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams,
+                                                                  tvars)
            case _ =>
          }
      } else {
@@ -240,8 +240,8 @@ private[internal] trait TypeConstraints { self: SymbolTable =>
            case TypeRef(_, `tparam`, _) =>
              debuglog(
                s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
-              tvar addLoBound tparam2.tpeHK
-                .instantiateTypeParams(tparams, tvars)
+              tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams,
+                                                                  tvars)
            case _ =>
          }
      }
diff --git a/repos/scala/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/repos/scala/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
index 589dd0412e4..b8fba3291b7 100644
--- a/repos/scala/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
+++ b/repos/scala/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
@@ -125,8 +125,15 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
                       classOf[URL],
                       classOf[ClassLoader])
      ctor.setAccessible(true)
-      ctor
-        .newInstance(name, null, null, null, null, null, null, null, this)
+      ctor.newInstance(name,
+                       null,
+                       null,
+                       null,
+                       null,
+                       null,
+                       null,
+                       null,
+                       this)
    })
  }
diff --git a/repos/scala/src/reflect/scala/reflect/io/File.scala b/repos/scala/src/reflect/scala/reflect/io/File.scala
index 71ea30f728e..936a76204de 100644
--- a/repos/scala/src/reflect/scala/reflect/io/File.scala
+++ b/repos/scala/src/reflect/scala/reflect/io/File.scala
@@ -121,8 +121,9 @@ class File(jfile: JFile)(implicit constructorCodec: Codec)
    */
  def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = {
    type JBoolean = java.lang.Boolean
-    val method = try classOf[JFile]
-      .getMethod("setExecutable", classOf[Boolean], classOf[Boolean])
+    val method = try classOf[JFile].getMethod("setExecutable",
+                                              classOf[Boolean],
+                                              classOf[Boolean])
    catch { case _: NoSuchMethodException => return false }
diff --git a/repos/scala/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/repos/scala/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 944a77d5af6..4b78b490b97 100644
--- a/repos/scala/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/repos/scala/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -700,8 +700,9 @@ private[scala] trait JavaMirrors
        case Array_apply =>
          ScalaRunTime.array_apply(objReceiver, args(0).asInstanceOf[Int])
        case Array_update =>
-          ScalaRunTime
-            .array_update(objReceiver, args(0).asInstanceOf[Int], args(1))
+          ScalaRunTime.array_update(objReceiver,
+                                    args(0).asInstanceOf[Int],
+                                    args(1))
        case Array_clone => ScalaRunTime.array_clone(objReceiver)
        case sym if isStringConcat(sym) => receiver.toString + objArg0
        case sym if sym.owner.isPrimitiveValueClass => invokePrimitiveMethod
@@ -839,8 +840,11 @@ private[scala] trait JavaMirrors
            val bytes = ssig.getBytes
            val len = ByteCodecs.decode(bytes)
            assignAssociatedFile(clazz, module, jclazz)
-            unpickler
-              .unpickle(bytes take len, 0, clazz, module, jclazz.getName)
+            unpickler.unpickle(bytes take len,
+                               0,
+                               clazz,
+                               module,
+                               jclazz.getName)
            markAllCompleted(clazz, module)
          case None =>
            loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match {
@@ -1435,8 +1439,9 @@ private[scala] trait JavaMirrors
    private def jmethodAsScala1(jmeth: jMethod): MethodSymbol = {
      val clazz = sOwner(jmeth)
-      val meth = clazz
-        .newMethod(newTermName(jmeth.getName), NoPosition, jmeth.scalaFlags)
+      val meth = clazz.newMethod(newTermName(jmeth.getName),
+                                 NoPosition,
+                                 jmeth.scalaFlags)
      methodCache enter (jmeth, meth)
      val tparams = jmeth.getTypeParameters.toList map createTypeParameter
      val paramtpes = jmeth.getGenericParameterTypes.toList map typeToScala
diff --git a/repos/scala/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/repos/scala/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index b1f5aa4abc2..d6082dea244 100644
--- a/repos/scala/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/repos/scala/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -64,8 +64,11 @@ object ReflectionUtils {
      }
    cl match {
      case cl if cl != null =>
-        "%s of type %s with classpath [%s] and parent being %s"
-          .format(cl, cl.getClass, inferClasspath(cl), show(cl.getParent))
+        "%s of type %s with classpath [%s] and parent being %s".format(
+          cl,
+          cl.getClass,
+          inferClasspath(cl),
+          show(cl.getParent))
      case null =>
        "primordial classloader with boot classpath [%s]".format(
          inferClasspath(cl))
diff --git a/repos/scala/src/repl/scala/tools/nsc/MainGenericRunner.scala b/repos/scala/src/repl/scala/tools/nsc/MainGenericRunner.scala
index d63189b2977..61734ff66da 100644
--- a/repos/scala/src/repl/scala/tools/nsc/MainGenericRunner.scala
+++ b/repos/scala/src/repl/scala/tools/nsc/MainGenericRunner.scala
@@ -77,11 +77,13 @@ class MainGenericRunner {
    def runTarget(): Either[Throwable, Boolean] = howToRun match {
      case AsObject =>
-        ObjectRunner
-          .runAndCatch(settings.classpathURLs, thingToRun, command.arguments)
+        ObjectRunner.runAndCatch(settings.classpathURLs,
+                                 thingToRun,
+                                 command.arguments)
      case AsScript =>
-        ScriptRunner
-          .runScriptAndCatch(settings, thingToRun, command.arguments)
+        ScriptRunner.runScriptAndCatch(settings,
+                                       thingToRun,
+                                       command.arguments)
      case AsJar =>
        JarRunner.runJar(settings, thingToRun, command.arguments)
      case Error =>
@@ -99,8 +101,9 @@ class MainGenericRunner {
     * This all needs a rewrite though.
     */
    if (isE) {
-      ScriptRunner
-        .runCommand(settings, combinedCode, thingToRun +: command.arguments)
+      ScriptRunner.runCommand(settings,
+                              combinedCode,
+                              thingToRun +: command.arguments)
    } else
      runTarget() match {
        case Left(ex) =>
diff --git a/repos/scala/src/repl/scala/tools/nsc/interpreter/IMain.scala b/repos/scala/src/repl/scala/tools/nsc/interpreter/IMain.scala
index 6e8ecf4d30f..214df2a6ae9 100644
--- a/repos/scala/src/repl/scala/tools/nsc/interpreter/IMain.scala
+++ b/repos/scala/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -737,8 +737,10 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory,
        IR.Error
      case Right(_) =>
-        val line = "%sval %s = %s.value"
-          .format(modifiers map (_ + " ") mkString, name, bindRep.evalPath)
+        val line = "%sval %s = %s.value".format(
+          modifiers map (_ + " ") mkString,
+          name,
+          bindRep.evalPath)
        repldbg("Interpreting: " + line)
        interpret(line)
    }
diff --git a/repos/scala/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/repos/scala/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index f35fa37247c..3c1f6569b2d 100644
--- a/repos/scala/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/repos/scala/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -196,8 +196,9 @@ trait MemberHandlers {
    override def resultExtractionCode(req: Request) = {
      val lhsType = string2code(req lookupTypeOf name)
      val res = string2code(req fullPath name)
-      """ + "%s: %s = " + %s + "\n" """
-        .format(string2code(lhs.toString), lhsType, res) + "\n"
+      """ + "%s: %s = " + %s + "\n" """.format(string2code(lhs.toString),
+                                               lhsType,
+                                               res) + "\n"
    }
  }
diff --git a/repos/scala/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/repos/scala/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
index 80058b7a05f..261a92a31d1 100644
--- a/repos/scala/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
+++ b/repos/scala/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
@@ -14,8 +14,8 @@ import scala.reflect.internal.util.FakePos
  * that generates documentation from source files.
  */
 class ScalaDoc {
-  val versionMsg = "Scaladoc %s -- %s"
-    .format(Properties.versionString, Properties.copyrightString)
+  val versionMsg = "Scaladoc %s -- %s".format(Properties.versionString,
+                                              Properties.copyrightString)

  def process(args: Array[String]): Boolean = {
    var reporter: ScalaDocReporter = null
diff --git a/repos/scala/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/repos/scala/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index bc9f3024f1c..23c13aa8384 100644
--- a/repos/scala/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/repos/scala/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -406,8 +406,8 @@ trait CommentFactoryBase { this: MemberLookupBase =>
      ((bodyTags remove key): @unchecked) match {
        case Some(r :: rs) if !(filterEmpty && r.blocks.isEmpty) =>
          if (!rs.isEmpty)
-            reporter
-              .warning(pos, s"Only one '@${key.name}' tag is allowed")
+            reporter.warning(pos,
+                             s"Only one '@${key.name}' tag is allowed")
          Some(r)
        case _ => None
      }
diff --git a/repos/scala/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/repos/scala/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index 6cb7ff8360e..86422e12d65 100644
--- a/repos/scala/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/repos/scala/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -952,8 +952,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
      val allSyms = useCases(aSym, inTpl.sym) map {
        case (bSym, bComment, bPos) =>
-          docComments
-            .put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
+          docComments.put(bSym, DocComment(bComment, bPos)) // put the comment in the list, don't parse it yet, closes SI-4898
          bSym
      }
diff --git a/repos/scala/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/repos/scala/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index 15871a52d0c..5408762ec6c 100644
--- a/repos/scala/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/repos/scala/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -98,8 +98,7 @@ trait ModelFactoryImplicitSupport {
      global.analyzer.rootContext(NoCompilationUnit)

    val results =
-      global.analyzer
-        .allViewsFrom(sym.tpe_*, context, sym.typeParams) ++ global.analyzer
+      global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams) ++ global.analyzer
        .allViewsFrom(byNameType(sym.tpe_*), context, sym.typeParams)
    var conversions =
      results.flatMap(result =>
        makeImplicitConversion(sym, result._1, result._2, context, inTpl))
@@ -199,8 +198,7 @@
      val newContext = context.makeImplicit(context.ambiguousErrors)
      newContext.macrosEnabled = false
      val newTyper = global.analyzer.newTyper(newContext)
-      newTyper
-        .silent(_.typed(appliedTree), reportAmbiguousErrors = false) match {
+      newTyper.silent(_.typed(appliedTree), reportAmbiguousErrors = false) match {
        case global.analyzer.SilentResultValue(t: Tree) => t
        case global.analyzer.SilentTypeError(err) =>
diff --git a/repos/scala/src/scalap/scala/tools/scalap/Arguments.scala b/repos/scala/src/scalap/scala/tools/scalap/Arguments.scala
index fc1377e57ae..55b1a22229a 100644
--- a/repos/scala/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/repos/scala/src/scalap/scala/tools/scalap/Arguments.scala
@@ -93,8 +93,8 @@ object Arguments {
      while ((i == j) && iter.hasNext) {
        val prefix = iter.next
        if (args(i) startsWith prefix) {
-          res
-            .addPrefixed(prefix, args(i).substring(prefix.length()).trim())
+          res.addPrefixed(prefix,
+                          args(i).substring(prefix.length()).trim())
          i += 1
        }
      }
diff --git a/repos/scala/src/scalap/scala/tools/scalap/Main.scala b/repos/scala/src/scalap/scala/tools/scalap/Main.scala
index d9c6fec2bc5..1d8f8e0d655 100644
--- a/repos/scala/src/scalap/scala/tools/scalap/Main.scala
+++ b/repos/scala/src/scalap/scala/tools/scalap/Main.scala
@@ -31,8 +31,9 @@ class Main {
  val SCALA_LONG_SIG_ANNOTATION = "Lscala/reflect/ScalaLongSignature;"
  val BYTES_VALUE = "bytes"

-  val versionMsg = "Scala classfile decoder %s -- %s\n"
-    .format(Properties.versionString, Properties.copyrightString)
+  val versionMsg = "Scala classfile decoder %s -- %s\n".format(
+    Properties.versionString,
+    Properties.copyrightString)

  /**Verbose program run?
    */
diff --git a/repos/scala/test/disabled/presentation/akka/src/akka/AkkaException.scala b/repos/scala/test/disabled/presentation/akka/src/akka/AkkaException.scala
index d51a0e3cff8..3f1844f7485 100644
--- a/repos/scala/test/disabled/presentation/akka/src/akka/AkkaException.scala
+++ b/repos/scala/test/disabled/presentation/akka/src/akka/AkkaException.scala
@@ -20,8 +20,10 @@ class AkkaException(message: String = "", cause: Throwable = null)
    with Serializable {
  val uuid = "%s_%s".format(AkkaException.hostname, newUuid)

-  override lazy val toString = "%s: %s\n[%s]\n%s"
-    .format(getClass.getName, message, uuid, stackTraceToString)
+  override lazy val toString = "%s: %s\n[%s]\n%s".format(getClass.getName,
+                                                         message,
+                                                         uuid,
+                                                         stackTraceToString)

  def stackTraceToString = {
    val trace = getStackTrace
diff --git a/repos/scala/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala b/repos/scala/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala
index 3ca783e41d9..e5a4a514c07 100644
--- a/repos/scala/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala
+++ b/repos/scala/test/disabled/presentation/akka/src/akka/actor/ActorRegistry.scala
@@ -386,8 +386,7 @@ class Index[K <: AnyRef, V <: AnyRef: ArrayTag] {
      if (set.remove(value)) { //If we can remove the value
        if (set.isEmpty) //and the set becomes empty
-          container
-            .remove(key, emptySet) //We try to remove the key if it's mapped to an empty set
+          container.remove(key, emptySet) //We try to remove the key if it's mapped to an empty set
        true //Remove succeeded
      } else false //Remove failed
diff --git a/repos/scala/test/files/run/Course-2002-03.scala b/repos/scala/test/files/run/Course-2002-03.scala
index 432af752aad..8761e2d026f 100644
--- a/repos/scala/test/files/run/Course-2002-03.scala
+++ b/repos/scala/test/files/run/Course-2002-03.scala
@@ -267,8 +267,8 @@ object M8 {
          if (that.contains(elem)) accu.incl(elem)
          else accu));
    def filter0(f: Int => Boolean, accu: IntSet): IntSet =
-      right
-        .filter0(f, left.filter0(f, if (f(elem)) accu.incl(elem) else accu));
+      right.filter0(f,
+                    left.filter0(f, if (f(elem)) accu.incl(elem) else accu));
  }

  def test = {
diff --git a/repos/scala/test/files/run/Course-2002-10.scala b/repos/scala/test/files/run/Course-2002-10.scala
index f65cd041884..d9c5bed741f 100644
--- a/repos/scala/test/files/run/Course-2002-10.scala
+++ b/repos/scala/test/files/run/Course-2002-10.scala
@@ -37,8 +37,8 @@ object M1 {
    val nm1 = s apply 0;
    val n = s apply 1;
    val np1 = s apply 2;
-    Stream
-      .cons(np1 - ((np1 - n) * (np1 - n) / (nm1 - 2 * n + np1)), euler(s.tail))
+    Stream.cons(np1 - ((np1 - n) * (np1 - n) / (nm1 - 2 * n + np1)),
+                euler(s.tail))
  };

  def better(
diff --git a/repos/scala/test/files/run/ReplacementMatching.scala b/repos/scala/test/files/run/ReplacementMatching.scala
index e5bc4aceb03..8909983a71e 100644
--- a/repos/scala/test/files/run/ReplacementMatching.scala
+++ b/repos/scala/test/files/run/ReplacementMatching.scala
@@ -29,14 +29,12 @@ object Test {

  def groupsMatching {
    val Date = """(\d+)/(\d+)/(\d+)""".r
-    for (Regex
-           .Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.") {
+    for (Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.") {
      assert(a == "1")
      assert(b == "1")
      assert(c == "2001")
    }
-    for (Regex
-           .Groups(a, b, c) <- (Date findAllIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.").matchData) {
+    for (Regex.Groups(a, b, c) <- (Date findAllIn "1/1/2001 marks the start of the millennium. 31/12/2000 doesn't.").matchData) {
      assert(a == "1" || a == "31")
      assert(b == "1" || b == "12")
      assert(c == "2001" || c == "2000")
diff --git a/repos/scala/test/files/run/typealias_overriding.scala b/repos/scala/test/files/run/typealias_overriding.scala
index 872e8f8e4d0..4b6a0037ec9 100644
--- a/repos/scala/test/files/run/typealias_overriding.scala
+++ b/repos/scala/test/files/run/typealias_overriding.scala
@@ -16,8 +16,7 @@ object Test extends App {
    type TNode = Node // can also directly write `class Node extends super.NodeImpl' -- doesn't change the bug
    class Node extends super.NodeImpl { override def toString = "LinkedNode" }

-    removing
-      .printNode(new Node, (x: removing.TNode) => x.toString) // make inference explicit, doesn't affect the bug
+    removing.printNode(new Node, (x: removing.TNode) => x.toString) // make inference explicit, doesn't affect the bug
  }

  new Linked
diff --git a/repos/scala/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala b/repos/scala/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala
index 0c11a5fcfb2..a7d0adff4ab 100644
--- a/repos/scala/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala
+++ b/repos/scala/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala
@@ -193,8 +193,9 @@ abstract class ParallelSeqCheck[T](collName: String)
          s.startsWith(s.tail, 1) == coll.startsWith(coll.tail, 1))) &&
      ("with each other" |: coll.startsWith(s)) &&
      ("modified" |: s.startsWith(collmodif) == coll.startsWith(collmodif)) &&
-      ("modified2" |: s.startsWith(collmodif, pos) == coll
-        .startsWith(collmodif, pos)) && (for (sq <- startEndSeqs) yield {
+      ("modified2" |: s.startsWith(collmodif, pos) == coll.startsWith(
+        collmodif,
+        pos)) && (for (sq <- startEndSeqs) yield {
        val ss = s.startsWith(sq, pos)
        val cs = coll.startsWith(fromSeq(sq), pos)
        if (ss != cs) {
@@ -252,11 +253,15 @@
        ("with par" |: s.patch(from, pat, repl) == coll.patch(from,
                                                              fromSeq(pat),
                                                              repl)) &&
-      ("with empty" |: s.patch(from, Nil, repl) == coll
-        .patch(from, fromSeq(Nil), repl)) &&
+      ("with empty" |: s.patch(from, Nil, repl) == coll.patch(
+        from,
+        fromSeq(Nil),
+        repl)) &&
      ("with one" |:
-        (s.length == 0 || s.patch(from, List(s(0)), 1) == coll
-          .patch(from, fromSeq(List(coll(0))), 1)))
+        (s.length == 0 || s.patch(from, List(s(0)), 1) == coll.patch(
+          from,
+          fromSeq(List(coll(0))),
+          1)))
    }

    if (!isCheckingViews)
diff --git a/repos/scala/test/junit/scala/collection/immutable/StringLikeTest.scala b/repos/scala/test/junit/scala/collection/immutable/StringLikeTest.scala
index f96a66d94ae..980d46f0220 100644
--- a/repos/scala/test/junit/scala/collection/immutable/StringLikeTest.scala
+++ b/repos/scala/test/junit/scala/collection/immutable/StringLikeTest.scala
@@ -36,17 +36,13 @@ class StringLikeTest {
    val surrogatepair = List(high, low).mkString
    val twopairs = surrogatepair + "_" + surrogatepair

-    AssertUtil
-      .assertSameElements("abcd".split('d'), Array("abc")) // not Array("abc", "")
-    AssertUtil
-      .assertSameElements("abccc".split('c'), Array("ab")) // not Array("ab", "", "", "")
-    AssertUtil
-      .assertSameElements("xxx".split('x'), Array[String]()) // not Array("", "", "", "")
+    AssertUtil.assertSameElements("abcd".split('d'), Array("abc")) // not Array("abc", "")
+    AssertUtil.assertSameElements("abccc".split('c'), Array("ab")) // not Array("ab", "", "", "")
+    AssertUtil.assertSameElements("xxx".split('x'), Array[String]()) // not Array("", "", "", "")
    AssertUtil.assertSameElements("".split('x'), Array("")) // not Array()
    AssertUtil.assertSameElements(
      "--ch--omp--".split("-"),
      Array("", "", "ch", "", "omp")) // All the cases!
-    AssertUtil
-      .assertSameElements(twopairs.split(high), Array(twopairs)) //don't split on characters that are half a surrogate pair
+    AssertUtil.assertSameElements(twopairs.split(high), Array(twopairs)) //don't split on characters that are half a surrogate pair
  }
 }
diff --git a/repos/scala/test/junit/scala/collection/mutable/ArrayBufferTest.scala b/repos/scala/test/junit/scala/collection/mutable/ArrayBufferTest.scala
index 5443547a89d..e6ef30bf886 100644
--- a/repos/scala/test/junit/scala/collection/mutable/ArrayBufferTest.scala
+++ b/repos/scala/test/junit/scala/collection/mutable/ArrayBufferTest.scala
@@ -27,8 +27,8 @@ class ArrayBufferTest {
    Assert.assertEquals(ArrayBuffer(2, 4, 1, 3, 6, 9, 5, 7), insertAt(2))

    // No strange last position weirdness
-    Assert
-      .assertEquals(ArrayBuffer(2, 4, 5, 7, 1, 3, 6, 9), insertAt(traver.size))
+    Assert.assertEquals(ArrayBuffer(2, 4, 5, 7, 1, 3, 6, 9),
+                        insertAt(traver.size))

    // Overflow is caught
    AssertUtil.assertThrows[IndexOutOfBoundsException] { insertAt(-1) }
diff --git a/repos/scala/test/junit/scala/util/SortingTest.scala b/repos/scala/test/junit/scala/util/SortingTest.scala
index 2972addc5ca..2db0e50d3b5 100644
--- a/repos/scala/test/junit/scala/util/SortingTest.scala
+++ b/repos/scala/test/junit/scala/util/SortingTest.scala
@@ -109,8 +109,8 @@ class SortingTest {
    for (size <- sizes) {
      val b = Array.fill(size)(rng.nextBoolean)
      val bfwd = Sorting.stableSort(b.clone: Seq[Boolean])
-      val bbkw = Sorting
-        .stableSort(b.clone: Seq[Boolean], (x: Boolean, y: Boolean) => x && !y)
+      val bbkw = Sorting.stableSort(b.clone: Seq[Boolean],
+                                    (x: Boolean, y: Boolean) => x && !y)
      assertTrue("All falses should be first",
                 bfwd.dropWhile(_ == false).forall(_ == true))
      assertTrue("All falses should be last when sorted backwards",
diff --git a/repos/scala/test/pending/run/delambdafy-lambdametafactory.scala b/repos/scala/test/pending/run/delambdafy-lambdametafactory.scala
index f24e2b20bfc..a1b47e4b6c7 100644
--- a/repos/scala/test/pending/run/delambdafy-lambdametafactory.scala
+++ b/repos/scala/test/pending/run/delambdafy-lambdametafactory.scala
@@ -39,11 +39,13 @@ object Test {
    val caller = MethodHandles.lookup
    val methodType =
      MethodType.methodType(classOf[AnyRef], Array[Class[_]](classOf[AnyRef]))
-    val instantiatedMethodType = MethodType
-      .methodType(instantiatedRet, Array[Class[_]](instantiatedParam))
+    val instantiatedMethodType = MethodType.methodType(
+      instantiatedRet,
+      Array[Class[_]](instantiatedParam))
    val (capturedParamTypes, captured) = capturedParams.unzip
-    val targetMethodType = MethodType
-      .methodType(instantiatedRet, capturedParamTypes :+ instantiatedParam)
+    val targetMethodType = MethodType.methodType(
+      instantiatedRet,
+      capturedParamTypes :+ instantiatedParam)
    val invokedType =
      MethodType.methodType(classOf[Function1ish[_, _]], capturedParamTypes)
    val target = caller.findStatic(hostClass, accessorName, targetMethodType)
diff --git a/repos/scala/test/pending/run/t2364.scala b/repos/scala/test/pending/run/t2364.scala
index 5fb205e85a2..8116def0cb2 100644
--- a/repos/scala/test/pending/run/t2364.scala
+++ b/repos/scala/test/pending/run/t2364.scala
@@ -32,8 +32,9 @@ object Test {
                             node.label.toLowerCase,
                             attributes(node.attributes))
      for (m <- node.child) serialize(m, serializer)
-      serializer
-        .endElement("", node.label.toLowerCase, node.label.toLowerCase)
+      serializer.endElement("",
+                            node.label.toLowerCase,
+                            node.label.toLowerCase)
    }
  }
  def parse(str: ByteArrayInputStream) = {
diff --git a/repos/scalafx/scalafx-demos/src/main/scala/scalafx/colorselector/ColorSelector.scala b/repos/scalafx/scalafx-demos/src/main/scala/scalafx/colorselector/ColorSelector.scala
index 4c6258e7bda..b96b8711df0 100644
--- a/repos/scalafx/scalafx-demos/src/main/scala/scalafx/colorselector/ColorSelector.scala
+++ b/repos/scalafx/scalafx-demos/src/main/scala/scalafx/colorselector/ColorSelector.scala
@@ -187,8 +187,8 @@ object ColorSelector extends JFXApp {
      Color.White)
  })
  controlBlue.selectedControl.onChange(controlSelected(controlBlue))
-  controlBlue
-    .changeColor(Color.rgb(0, 0, controlBlue.value.value.toInt), Color.White)
+  controlBlue.changeColor(Color.rgb(0, 0, controlBlue.value.value.toInt),
+                          Color.White)

  val controlAlpha = new SliderControl("A") {
    value = 255
diff --git a/repos/scalafx/scalafx-demos/src/main/scala/scalafx/colorselector/Formatter.scala b/repos/scalafx/scalafx-demos/src/main/scala/scalafx/colorselector/Formatter.scala
index 6ef6599595f..7873b9e4e3a 100644
--- a/repos/scalafx/scalafx-demos/src/main/scala/scalafx/colorselector/Formatter.scala
+++ b/repos/scalafx/scalafx-demos/src/main/scala/scalafx/colorselector/Formatter.scala
@@ -85,8 +85,9 @@ object PercentFormatter extends Formatter("Percent") {
                     c.opacity)

  def formatWithoutAlpha(c: Color): String =
-    RGB_FORMAT
-      .format(doubleToInt(c.red), doubleToInt(c.green), doubleToInt(c.blue))
+    RGB_FORMAT.format(doubleToInt(c.red),
+                      doubleToInt(c.green),
+                      doubleToInt(c.blue))
 }

 object HsbFormatter extends Formatter("HSB") {
diff --git a/repos/scalafx/scalafx/src/main/scala/scalafx/collections/package.scala b/repos/scalafx/scalafx/src/main/scala/scalafx/collections/package.scala
index 34aa10e13a0..b558c725f56 100644
--- a/repos/scalafx/scalafx/src/main/scala/scalafx/collections/package.scala
+++ b/repos/scalafx/scalafx/src/main/scala/scalafx/collections/package.scala
@@ -93,8 +93,9 @@ package object collections {
    */
  def fillSFXCollection[J <: Object](originalList: jfxc.ObservableList[J],
                                     filler: Iterable[SFXDelegate[J]]) {
-    this
-      .internalFiller(originalList, filler, (s: SFXDelegate[J]) => s.delegate)
+    this.internalFiller(originalList,
+                        filler,
+                        (s: SFXDelegate[J]) => s.delegate)
  }

  /**
diff --git a/repos/scalafx/scalafx/src/main/scala/scalafx/collections/transformation/FilteredBuffer.scala b/repos/scalafx/scalafx/src/main/scala/scalafx/collections/transformation/FilteredBuffer.scala
index e3e51373d1e..6df887273e7 100644
--- a/repos/scalafx/scalafx/src/main/scala/scalafx/collections/transformation/FilteredBuffer.scala
+++ b/repos/scalafx/scalafx/src/main/scala/scalafx/collections/transformation/FilteredBuffer.scala
@@ -94,9 +94,10 @@ class FilteredBuffer[E](override val delegate: jfxct.FilteredList[E])
    ObjectProperty.fillProperty(delegate.predicateProperty, v)
  }
  def predicate_=(predicate: (E) => Boolean) {
-    ObjectProperty
-      .fillProperty(delegate.predicateProperty, new ju.function.Predicate[E] {
-        override def test(t: E): Boolean = predicate(t)
-      })
+    ObjectProperty.fillProperty(delegate.predicateProperty,
+                                new ju.function.Predicate[E] {
+                                  override def test(t: E): Boolean =
+                                    predicate(t)
+                                })
  }
 }
diff --git a/repos/scalafx/scalafx/src/main/scala/scalafx/print/Printer.scala b/repos/scalafx/scalafx/src/main/scala/scalafx/print/Printer.scala
index 71ba3fcff68..b16179130f0 100644
--- a/repos/scalafx/scalafx/src/main/scala/scalafx/print/Printer.scala
+++ b/repos/scalafx/scalafx/src/main/scala/scalafx/print/Printer.scala
@@ -160,8 +160,12 @@ final class Printer(override val delegate: jfxp.Printer)
                      rMargin: Double,
                      tMargin: Double,
                      bMargin: Double): PageLayout =
-    delegate
-      .createPageLayout(paper, orient, lMargin, rMargin, tMargin, bMargin)
+    delegate.createPageLayout(paper,
+                              orient,
+                              lMargin,
+                              rMargin,
+                              tMargin,
+                              bMargin)

  /**
    * Obtain a new PageLayout instance for this printer using the specified parameters.
diff --git a/repos/scalafx/scalafx/src/main/scala/scalafx/scene/control/TreeTableView.scala b/repos/scalafx/scalafx/src/main/scala/scalafx/scene/control/TreeTableView.scala
index 9831c903e36..0a78f50cfd7 100644
--- a/repos/scalafx/scalafx/src/main/scala/scalafx/scene/control/TreeTableView.scala
+++ b/repos/scalafx/scalafx/src/main/scala/scalafx/scene/control/TreeTableView.scala
@@ -342,8 +342,10 @@ object TreeTableView {
                   minColumn: TableColumnBase[jfxsc.TreeItem[S], _],
                   maxRow: Int,
                   maxColumn: TableColumnBase[jfxsc.TreeItem[S], _]) {
-      delegate
-        .selectRange(minRow, minColumn.delegate, maxRow, maxColumn.delegate)
+      delegate.selectRange(minRow,
+                           minColumn.delegate,
+                           maxRow,
+                           maxColumn.delegate)
    }

    /**
diff --git a/repos/scalafx/scalafx/src/main/scala/scalafx/scene/layout/GridPane.scala b/repos/scalafx/scalafx/src/main/scala/scalafx/scene/layout/GridPane.scala
index 25d563b5436..8d6fec78d6f 100644
--- a/repos/scalafx/scalafx/src/main/scala/scalafx/scene/layout/GridPane.scala
+++ b/repos/scalafx/scalafx/src/main/scala/scalafx/scene/layout/GridPane.scala
@@ -189,8 +189,11 @@ object GridPane {
                    rowIndex: Int,
                    columnspan: Int,
                    rowspan: Int) {
-    jfxsl.GridPane
-      .setConstraints(child, columnIndex, rowIndex, columnspan, rowspan)
+    jfxsl.GridPane.setConstraints(child,
+                                  columnIndex,
+                                  rowIndex,
+                                  columnspan,
+                                  rowspan)
  }

  /**
diff --git a/repos/scalafx/scalafx/src/main/scala/scalafx/scene/paint/Color.scala b/repos/scalafx/scalafx/src/main/scala/scalafx/scene/paint/Color.scala
index 75b93c6ade1..b01e85e9df7 100644
--- a/repos/scalafx/scalafx/src/main/scala/scalafx/scene/paint/Color.scala
+++ b/repos/scalafx/scalafx/src/main/scala/scalafx/scene/paint/Color.scala
@@ -827,8 +827,10 @@ class Color(override val delegate: jfxsp.Color)
                 saturationFactor: Double,
                 brightnessFactor: Double,
                 opacityFactor: Double): Color =
-    delegate
-      .deriveColor(hueShift, saturationFactor, brightnessFactor, opacityFactor)
+    delegate.deriveColor(hueShift,
+                         saturationFactor,
+                         brightnessFactor,
+                         opacityFactor)

  /**
    * Creates a new Color that is a less saturated version of this Color.
diff --git a/repos/scalafx/scalafx/src/main/scala/scalafx/scene/transform/Transform.scala b/repos/scalafx/scalafx/src/main/scala/scalafx/scene/transform/Transform.scala
index 917bace3b55..4fcc3095ede 100644
--- a/repos/scalafx/scalafx/src/main/scala/scalafx/scene/transform/Transform.scala
+++ b/repos/scalafx/scalafx/src/main/scala/scalafx/scene/transform/Transform.scala
@@ -65,8 +65,18 @@ object Transform {
              mzy: Double,
              mzz: Double,
              tz: Double): Affine =
-    jfxst.Transform
-      .affine(mxx, mxy, mxz, tx, myx, myy, myz, ty, mzx, mzy, mzz, tz)
+    jfxst.Transform.affine(mxx,
+                           mxy,
+                           mxz,
+                           tx,
+                           myx,
+                           myy,
+                           myz,
+                           ty,
+                           mzx,
+                           mzy,
+                           mzz,
+                           tz)
 
   /**
    * Returns a Rotate object that rotates coordinates around a pivot point.
diff --git a/repos/scalafx/scalafx/src/test/scala/scalafx/animation/InterpolatorSpec.scala b/repos/scalafx/scalafx/src/test/scala/scalafx/animation/InterpolatorSpec.scala
index 041ec3bb822..15602a1a26d 100644
--- a/repos/scalafx/scalafx/src/test/scala/scalafx/animation/InterpolatorSpec.scala
+++ b/repos/scalafx/scalafx/src/test/scala/scalafx/animation/InterpolatorSpec.scala
@@ -74,8 +74,10 @@ class InterpolatorSpec extends FlatSpec with PropertyComparator {
     kv1.interpolator.getClass should equal(
       Interpolator.TANGENT((100 ms), .3).getClass)
     val kv2 =
-      doubleProperty -> 50 tween Interpolator
-        .TANGENT((50 ms), .5, (100 ms), .3)
+      doubleProperty -> 50 tween Interpolator.TANGENT((50 ms),
+                                                      .5,
+                                                      (100 ms),
+                                                      .3)
     // equals method doesn't work, so the best we can do is test the class type
     kv2.interpolator.getClass should equal(
       Interpolator.TANGENT((50 ms), .5, (100 ms), .3).getClass)
diff --git a/repos/scalafx/scalafx/src/test/scala/scalafx/testutil/SFXEnumDelegateSpec.scala b/repos/scalafx/scalafx/src/test/scala/scalafx/testutil/SFXEnumDelegateSpec.scala
index c2714f744b0..66acd5a1ef5 100644
--- a/repos/scalafx/scalafx/src/test/scala/scalafx/testutil/SFXEnumDelegateSpec.scala
+++ b/repos/scalafx/scalafx/src/test/scala/scalafx/testutil/SFXEnumDelegateSpec.scala
@@ -85,9 +85,11 @@ abstract class SFXEnumDelegateSpec[E <: java.lang.Enum[E],
   }
 
   private def assertScalaEnumWithOrdinal(s: S, index: Int) {
-    assert(s.delegate.ordinal() == index,
-           "%s - Expected position: %d, actual: %d"
-             .format(s, s.delegate.ordinal(), index))
+    assert(
+      s.delegate.ordinal() == index,
+      "%s - Expected position: %d, actual: %d".format(s,
+                                                      s.delegate.ordinal(),
+                                                      index))
   }
 
   protected override def getDesirableMethodName(javaMethod: Method): String =
diff --git a/repos/scalafx/scalafx/src/test/scala/scalafx/util/StringConverterSpec.scala b/repos/scalafx/scalafx/src/test/scala/scalafx/util/StringConverterSpec.scala
index 26bfd218154..72a1692ef37 100644
--- a/repos/scalafx/scalafx/src/test/scala/scalafx/util/StringConverterSpec.scala
+++ b/repos/scalafx/scalafx/src/test/scala/scalafx/util/StringConverterSpec.scala
@@ -105,8 +105,9 @@ class StringConverterSpec extends FlatSpec {
       value: T,
       converterName: String,
       typeName: String) {
-    converterName should "convert '%s' in a %s and vice-versa"
-      .format(string, typeName) in {
+    converterName should "convert '%s' in a %s and vice-versa".format(
+      string,
+      typeName) in {
       val numericValue = converter.fromString(string)
       numericValue should equal(value)
       converter.toString(numericValue) should equal(string)
diff --git a/repos/scalatra/atmosphere/src/main/scala/org/atmosphere/cpr/ScalatraBroadcasterFactory.scala b/repos/scalatra/atmosphere/src/main/scala/org/atmosphere/cpr/ScalatraBroadcasterFactory.scala
index 674c6b2febd..b884a0ee721 100644
--- a/repos/scalatra/atmosphere/src/main/scala/org/atmosphere/cpr/ScalatraBroadcasterFactory.scala
+++ b/repos/scalatra/atmosphere/src/main/scala/org/atmosphere/cpr/ScalatraBroadcasterFactory.scala
@@ -141,8 +141,9 @@ class ScalatraBroadcasterFactory(var cfg: AtmosphereConfig,
   def remove(b: Broadcaster, id: Any): Boolean = {
     val removed: Boolean = store.remove(id, b)
     if (removed) {
-      logger
-        .debug("Removing Broadcaster {} factory size now {} ", id, store.size)
+      logger.debug("Removing Broadcaster {} factory size now {} ",
+                   id,
+                   store.size)
     }
     removed
   }
diff --git a/repos/scalatra/atmosphere/src/main/scala/org/scalatra/atmosphere/ScalatraAtmosphereHandler.scala b/repos/scalatra/atmosphere/src/main/scala/org/scalatra/atmosphere/ScalatraAtmosphereHandler.scala
index f3c5c2ece5e..2bd6f2ff0a2 100644
--- a/repos/scalatra/atmosphere/src/main/scala/org/scalatra/atmosphere/ScalatraAtmosphereHandler.scala
+++ b/repos/scalatra/atmosphere/src/main/scala/org/scalatra/atmosphere/ScalatraAtmosphereHandler.scala
@@ -84,42 +84,41 @@ class ScalatraAtmosphereHandler(scalatraApp: ScalatraBase)(
       var session = resource.session()
       val isNew = !session.contains(org.scalatra.atmosphere.AtmosphereClientKey)
 
-      scalatraApp
-        .withRequestResponse(resource.getRequest, resource.getResponse) {
-          scalatraApp.withRouteMultiParams(route) {
-
-            (req.requestMethod, route.isDefined) match {
-              case (Post, _) =>
-                var client: AtmosphereClient = null
+      scalatraApp.withRequestResponse(resource.getRequest, resource.getResponse) {
+        scalatraApp.withRouteMultiParams(route) {
+
+          (req.requestMethod, route.isDefined) match {
+            case (Post, _) =>
+              var client: AtmosphereClient = null
+              if (isNew) {
+                session = AtmosphereResourceFactory.getDefault
+                  .find(resource.uuid)
+                  .session
+              }
+
+              client = session(org.scalatra.atmosphere.AtmosphereClientKey)
+                .asInstanceOf[AtmosphereClient]
+              handleIncomingMessage(req, client)
+            case (_, true) =>
+              val cl =
                 if (isNew) {
-                  session = AtmosphereResourceFactory.getDefault
-                    .find(resource.uuid)
-                    .session
-                }
-
-                client = session(org.scalatra.atmosphere.AtmosphereClientKey)
-                  .asInstanceOf[AtmosphereClient]
-                handleIncomingMessage(req, client)
-              case (_, true) =>
-                val cl =
-                  if (isNew) {
-                    createClient(route.get, session, resource)
-                  } else null
-
-                addEventListener(resource)
-                resumeIfNeeded(resource)
-                configureBroadcaster(resource)
-                if (isNew && cl != null) handleIncomingMessage(Connected, cl)
-                resource.suspend
-              case _ =>
-                val ex = new ScalatraAtmosphereException(
-                  "There is no atmosphere route defined for " +
-                    req.getRequestURI)
-                internalLogger.warn(ex.getMessage)
-                throw ex
-            }
+                  createClient(route.get, session, resource)
+                } else null
+
+              addEventListener(resource)
+              resumeIfNeeded(resource)
+              configureBroadcaster(resource)
+              if (isNew && cl != null) handleIncomingMessage(Connected, cl)
+              resource.suspend
+            case _ =>
+              val ex = new ScalatraAtmosphereException(
+                "There is no atmosphere route defined for " +
+                  req.getRequestURI)
+              internalLogger.warn(ex.getMessage)
+              throw ex
           }
         }
+      }
   }
 
   private[this] def createClient(route: MatchedRoute,
diff --git a/repos/scalatra/commands/src/main/scala/org/scalatra/commands/JacksonJsonParsing.scala b/repos/scalatra/commands/src/main/scala/org/scalatra/commands/JacksonJsonParsing.scala
index 57d967a4b9e..57ed2e29010 100644
--- a/repos/scalatra/commands/src/main/scala/org/scalatra/commands/JacksonJsonParsing.scala
+++ b/repos/scalatra/commands/src/main/scala/org/scalatra/commands/JacksonJsonParsing.scala
@@ -21,11 +21,13 @@ trait JacksonJsonParsing
     requestFormat match {
       case "json" | "xml" =>
-        newCommand
-          .bindTo(parsedBody(request), multiParams(request), request.headers)
+        newCommand.bindTo(parsedBody(request),
+                          multiParams(request),
+                          request.headers)
       case _ =>
-        newCommand
-          .bindTo(params(request), multiParams(request), request.headers)
+        newCommand.bindTo(params(request),
+                          multiParams(request),
+                          request.headers)
     }
     request.update(commandRequestKey[T], newCommand)
     newCommand
diff --git a/repos/scalatra/commands/src/main/scala/org/scalatra/commands/NativeJsonParsing.scala b/repos/scalatra/commands/src/main/scala/org/scalatra/commands/NativeJsonParsing.scala
index e683e4a9199..7a66aa76242 100644
--- a/repos/scalatra/commands/src/main/scala/org/scalatra/commands/NativeJsonParsing.scala
+++ b/repos/scalatra/commands/src/main/scala/org/scalatra/commands/NativeJsonParsing.scala
@@ -16,11 +16,13 @@ trait NativeJsonParsing
       mf: Manifest[T]): T = {
     format match {
       case "json" | "xml" =>
-        newCommand
-          .bindTo(parsedBody(request), multiParams(request), request.headers)
+        newCommand.bindTo(parsedBody(request),
+                          multiParams(request),
+                          request.headers)
       case _ =>
-        newCommand
-          .bindTo(params(request), multiParams(request), request.headers)
+        newCommand.bindTo(params(request),
+                          multiParams(request),
+                          request.headers)
     }
     request.update(commandRequestKey[T], newCommand)
     newCommand
diff --git a/repos/scalatra/commands/src/main/scala/org/scalatra/commands/binding.scala b/repos/scalatra/commands/src/main/scala/org/scalatra/commands/binding.scala
index ffaa54c41b4..b9e95aec3d7 100644
--- a/repos/scalatra/commands/src/main/scala/org/scalatra/commands/binding.scala
+++ b/repos/scalatra/commands/src/main/scala/org/scalatra/commands/binding.scala
@@ -144,8 +144,11 @@ sealed trait Binding {
   def apply(toBind: Either[String, Option[S]]): Binding
 
   override def toString() =
-    "BindingContainer[%s](name: %s, value: %s, original: %s)"
-      .format(valueManifest.erasure.getSimpleName, name, validation, original)
+    "BindingContainer[%s](name: %s, value: %s, original: %s)".format(
+      valueManifest.erasure.getSimpleName,
+      name,
+      validation,
+      original)
 }
 
 trait BindingSyntax extends BindingValidatorImplicits {
diff --git a/repos/scalatra/commands/src/main/scala/org/scalatra/commands/field.scala b/repos/scalatra/commands/src/main/scala/org/scalatra/commands/field.scala
index 185a6de14cc..0ef5f147b90 100644
--- a/repos/scalatra/commands/src/main/scala/org/scalatra/commands/field.scala
+++ b/repos/scalatra/commands/src/main/scala/org/scalatra/commands/field.scala
@@ -193,8 +193,9 @@ trait DataboundFieldDescriptor[S, T] extends FieldDescriptor[T] {
     this.asInstanceOf[DataboundFieldDescriptor[V, T]]
 
   override def toString() =
-    "FieldDescriptor(name: %s, original: %s, value: %s)"
-      .format(name, original, value)
+    "FieldDescriptor(name: %s, original: %s, value: %s)".format(name,
+                                                                original,
+                                                                value)
   def validate: ValidatedFieldDescriptor[S, T]
   def validateWith(
       bindingValidators: BindingValidator[T]*): DataboundFieldDescriptor[S, T]
@@ -247,8 +248,10 @@ class BoundFieldDescriptor[S, T](val original: Option[S],
     case _ => false
   }
   override def toString() =
-    "BoundFieldDescriptor(name: %s, original: %s, converted: %s)"
-      .format(name, original, value)
+    "BoundFieldDescriptor(name: %s, original: %s, converted: %s)".format(
+      name,
+      original,
+      value)
 
   def validateWith(bindingValidators: BindingValidator[T]*)
     : DataboundFieldDescriptor[S, T] = {
@@ -332,8 +335,10 @@ class ValidatedBoundFieldDescriptor[S, T](
     case _ => false
   }
   override def toString() =
-    "BoundFieldDescriptor(name: %s, original: %s, converted: %s)"
-      .format(name, original, value)
+    "BoundFieldDescriptor(name: %s, original: %s, converted: %s)".format(
+      name,
+      original,
+      value)
 
   def validateWith(bindingValidators: BindingValidator[T]*)
     : DataboundFieldDescriptor[S, T] = {
diff --git a/repos/scalatra/core/src/main/scala/org/scalatra/RouteRegistry.scala b/repos/scalatra/core/src/main/scala/org/scalatra/RouteRegistry.scala
index ab54cd4b4a8..49987d7e29d 100644
--- a/repos/scalatra/core/src/main/scala/org/scalatra/RouteRegistry.scala
+++ b/repos/scalatra/core/src/main/scala/org/scalatra/RouteRegistry.scala
@@ -26,8 +26,8 @@ class RouteRegistry {
   def apply(method: HttpMethod): Seq[Route] =
     method match {
       case Head =>
-        _methodRoutes
-          .getOrElse(Head, _methodRoutes.getOrElse(Get, Vector.empty))
+        _methodRoutes.getOrElse(Head,
+                                _methodRoutes.getOrElse(Get, Vector.empty))
       case m => _methodRoutes.getOrElse(m, Vector.empty)
     }
 
diff --git a/repos/scalatra/core/src/main/scala/org/scalatra/ScalatraFilter.scala b/repos/scalatra/core/src/main/scala/org/scalatra/ScalatraFilter.scala
index ff91de1ddff..26ac7b8cdaf 100644
--- a/repos/scalatra/core/src/main/scala/org/scalatra/ScalatraFilter.scala
+++ b/repos/scalatra/core/src/main/scala/org/scalatra/ScalatraFilter.scala
@@ -63,8 +63,8 @@ trait ScalatraFilter extends Filter with ServletBase {
     case Some(uri) => uri.toString
     case _ => {
       val requestPath = getRequestPath
-      request
-        .setAttribute("org.scalatra.ScalatraFilter.requestPath", requestPath)
+      request.setAttribute("org.scalatra.ScalatraFilter.requestPath",
+                           requestPath)
       requestPath.toString
     }
   }
diff --git a/repos/scalatra/core/src/main/scala/org/scalatra/util/FileCharset.scala b/repos/scalatra/core/src/main/scala/org/scalatra/util/FileCharset.scala
index 83da89a57b7..43a55a4bdac 100644
--- a/repos/scalatra/core/src/main/scala/org/scalatra/util/FileCharset.scala
+++ b/repos/scalatra/core/src/main/scala/org/scalatra/util/FileCharset.scala
@@ -29,8 +29,8 @@ object FileCharset {
       getCharset(detector, Codec.fileEncodingCodec)
     } catch {
       case t: Throwable =>
-        logger
-          .warn("Failed to detect charset for file: " + file.getPath + ".", t)
+        logger.warn("Failed to detect charset for file: " + file.getPath + ".",
+                    t)
         Codec.defaultCharsetCodec.charSet
     } finally {
       detector.reset()
diff --git a/repos/scalatra/core/src/test/scala/org/scalatra/CorsSupportSpec.scala b/repos/scalatra/core/src/test/scala/org/scalatra/CorsSupportSpec.scala
index c1e9c78a865..efa8db66c41 100644
--- a/repos/scalatra/core/src/test/scala/org/scalatra/CorsSupportSpec.scala
+++ b/repos/scalatra/core/src/test/scala/org/scalatra/CorsSupportSpec.scala
@@ -14,8 +14,8 @@ class CorsSupportSpec extends ScalatraSpec {
       config.context.setInitParameter(
         CorsSupport.AllowedHeadersKey,
         "X-Requested-With,Authorization,Content-Type,Accept,Origin")
-      config.context
-        .setInitParameter(CorsSupport.AllowedMethodsKey, "GET,HEAD,POST")
+      config.context.setInitParameter(CorsSupport.AllowedMethodsKey,
+                                      "GET,HEAD,POST")
       super.initialize(config)
     }
   }, "/*")
@@ -73,8 +73,8 @@ class DisabledCorsSupportSpec extends ScalatraSpec {
       config.context.setInitParameter(
        CorsSupport.AllowedHeadersKey,
        "X-Requested-With,Authorization,Content-Type,Accept,Origin")
-      config.context
-        .setInitParameter(CorsSupport.AllowedMethodsKey, "GET,HEAD,POST")
+      config.context.setInitParameter(CorsSupport.AllowedMethodsKey,
+                                      "GET,HEAD,POST")
       config.context.setInitParameter(CorsSupport.EnableKey, "false")
       super.initialize(config)
     }
diff --git a/repos/scalatra/core/src/test/scala/org/scalatra/UrlSupportTest.scala b/repos/scalatra/core/src/test/scala/org/scalatra/UrlSupportTest.scala
index 18ca5004822..0e3b1b8a3df 100644
--- a/repos/scalatra/core/src/test/scala/org/scalatra/UrlSupportTest.scala
+++ b/repos/scalatra/core/src/test/scala/org/scalatra/UrlSupportTest.scala
@@ -12,8 +12,9 @@ class UrlSupportTest extends ScalatraFunSuite {
   }
 
   get("/option") {
-    this
-      .url(params("url"), Seq("id" -> params.get("id")), absolutize = false)
+    this.url(params("url"),
+             Seq("id" -> params.get("id")),
+             absolutize = false)
   }
 
   get("/strip-context") {
diff --git a/repos/scalatra/example/src/main/scala/org/scalatra/BasicAuthExample.scala b/repos/scalatra/example/src/main/scala/org/scalatra/BasicAuthExample.scala
index 2586c3b9126..c8a5e57b1df 100644
--- a/repos/scalatra/example/src/main/scala/org/scalatra/BasicAuthExample.scala
+++ b/repos/scalatra/example/src/main/scala/org/scalatra/BasicAuthExample.scala
@@ -59,8 +59,9 @@ class BasicAuthExample extends ScalatraServlet with AuthenticationSupport {
 
       click
 
     )
-    Template
-      .page("Basic Auth Example", nodes, url(_, includeServletPath = false))
+    Template.page("Basic Auth Example",
+                  nodes,
+                  url(_, includeServletPath = false))
   }
 
   get("/linked") {
@@ -70,7 +71,8 @@ class BasicAuthExample extends ScalatraServlet with AuthenticationSupport {
 
       back
 
     )
-    Template
-      .page("Basic Auth Example", nodes, url(_, includeServletPath = false))
+    Template.page("Basic Auth Example",
+                  nodes,
+                  url(_, includeServletPath = false))
   }
 }
diff --git a/repos/scalatra/example/src/main/scala/org/scalatra/FileUploadExample.scala b/repos/scalatra/example/src/main/scala/org/scalatra/FileUploadExample.scala
index 048cf8be271..bf6e8482878 100644
--- a/repos/scalatra/example/src/main/scala/org/scalatra/FileUploadExample.scala
+++ b/repos/scalatra/example/src/main/scala/org/scalatra/FileUploadExample.scala
@@ -16,8 +16,9 @@ class FileUploadExample
     MultipartConfig(maxFileSize = Some(3 * 1024 * 1024)))
 
   def displayPage(content: Seq[Node]) =
-    Template
-      .page("File upload example", content, url(_, includeServletPath = false))
+    Template.page("File upload example",
+                  content,
+                  url(_, includeServletPath = false))
 
   error {
     case e: SizeConstraintExceededException =>
diff --git a/repos/scalatra/scalate/src/main/scala/org/scalatra/scalate/ScalateRenderSupport.scala b/repos/scalatra/scalate/src/main/scala/org/scalatra/scalate/ScalateRenderSupport.scala
index c8cb980a12f..e4f704b0ce2 100644
--- a/repos/scalatra/scalate/src/main/scala/org/scalatra/scalate/ScalateRenderSupport.scala
+++ b/repos/scalatra/scalate/src/main/scala/org/scalatra/scalate/ScalateRenderSupport.scala
@@ -23,8 +23,8 @@ trait ScalateRenderSupport { self: ScalatraBase with ScalateSupport =>
       statusCode: Int = 200)(implicit request: HttpServletRequest,
                              response: HttpServletResponse) {
     contentType = responseContentType
-    response
-      .setHeader("Cache-Control", "public, max-age=%d" format cacheMaxAge)
+    response.setHeader("Cache-Control",
+                       "public, max-age=%d" format cacheMaxAge)
     response.setStatus(statusCode)
     renderResponseBody(
       templateEngine.layout(
diff --git a/repos/scalatra/swagger-ext/src/test/scala/org/scalatra/swagger/SwaggerCommandSupportSpec.scala b/repos/scalatra/swagger-ext/src/test/scala/org/scalatra/swagger/SwaggerCommandSupportSpec.scala
index e4f7614b1cf..d04efe19fd3 100644
--- a/repos/scalatra/swagger-ext/src/test/scala/org/scalatra/swagger/SwaggerCommandSupportSpec.scala
+++ b/repos/scalatra/swagger-ext/src/test/scala/org/scalatra/swagger/SwaggerCommandSupportSpec.scala
@@ -6,8 +6,9 @@
 import org.scalatra.commands._
 import org.scalatra.test.specs2.MutableScalatraSpec
 
 object SwaggerCommandSupportSpec {
-  implicit val stringFormat = DefaultJsonFormats
-    .GenericFormat(DefaultReaders.StringReader, DefaultWriters.StringWriter)
+  implicit val stringFormat = DefaultJsonFormats.GenericFormat(
+    DefaultReaders.StringReader,
+    DefaultWriters.StringWriter)
   class SimpleCommand extends ParamsOnlyCommand {
     val name: Field[String] = asString("name").notBlank.position(1)
     val age: Field[Int] = bind[Int]("age").optional(0)
diff --git a/repos/scalatra/swagger/src/main/scala/org/scalatra/swagger/reflect/Reflector.scala b/repos/scalatra/swagger/src/main/scala/org/scalatra/swagger/reflect/Reflector.scala
index a8f0b8ac5a0..58b043da2a1 100644
--- a/repos/scalatra/swagger/src/main/scala/org/scalatra/swagger/reflect/Reflector.scala
+++ b/repos/scalatra/swagger/src/main/scala/org/scalatra/swagger/reflect/Reflector.scala
@@ -165,8 +165,10 @@ object Reflector {
       case v: TypeVariable[_] =>
         val a = owner.typeVars.getOrElse(v, scalaTypeOf(v))
         if (a.erasure == classOf[java.lang.Object]) {
-          val r = ScalaSigReader
-            .readConstructor(name, owner, index, ctorParameterNames)
+          val r = ScalaSigReader.readConstructor(name,
+                                                 owner,
+                                                 index,
+                                                 ctorParameterNames)
           scalaTypeOf(r)
         } else a
       case v: ParameterizedType =>
diff --git a/repos/scalatra/swagger/src/main/scala/org/scalatra/swagger/reflect/descriptors.scala b/repos/scalatra/swagger/src/main/scala/org/scalatra/swagger/reflect/descriptors.scala
index 1abca27ed7c..2109c0dabfd 100644
--- a/repos/scalatra/swagger/src/main/scala/org/scalatra/swagger/reflect/descriptors.scala
+++ b/repos/scalatra/swagger/src/main/scala/org/scalatra/swagger/reflect/descriptors.scala
@@ -59,8 +59,9 @@ object ManifestScalaType {
   def apply(erasure: Class[_],
             typeArgs: Seq[ScalaType] = Seq.empty): ScalaType = {
-    val mf = ManifestFactory
-      .manifestOf(erasure, typeArgs.map(ManifestFactory.manifestOf(_)))
+    val mf = ManifestFactory.manifestOf(
+      erasure,
+      typeArgs.map(ManifestFactory.manifestOf(_)))
     ManifestScalaType(mf)
   }
 
@@ -257,8 +258,9 @@ class ManifestScalaType(val manifest: Manifest[_]) extends ScalaType {
     else if (erasure == classOf[Number]) ManifestScalaType.NumberType
     /* end optimization */
     else {
-      val mf = ManifestFactory
-        .manifestOf(erasure, typeArgs.map(ManifestFactory.manifestOf(_)))
+      val mf = ManifestFactory.manifestOf(
+        erasure,
+        typeArgs.map(ManifestFactory.manifestOf(_)))
       val st = new CopiedManifestScalaType(mf, typeVars, isPrimitive)
       if (typeArgs.isEmpty) types.replace(mf, st)
       else st
diff --git a/repos/scalatra/test/src/main/scala/org/scalatra/test/JettyContainer.scala b/repos/scalatra/test/src/main/scala/org/scalatra/test/JettyContainer.scala
index a37087516e6..dedca00577a 100644
--- a/repos/scalatra/test/src/main/scala/org/scalatra/test/JettyContainer.scala
+++ b/repos/scalatra/test/src/main/scala/org/scalatra/test/JettyContainer.scala
@@ -56,8 +56,9 @@ trait JettyContainer extends Container {
       case _ =>
     }
 
-    servletContextHandler
-      .addServlet(holder, if (path.endsWith("/*")) path else path + "/*")
+    servletContextHandler.addServlet(
+      holder,
+      if (path.endsWith("/*")) path else path + "/*")
   }
 
   def addServlet(servlet: Class[_ <: HttpServlet], path: String) =
@@ -83,8 +84,9 @@ trait JettyContainer extends Container {
     // Add a default servlet. If there is no underlying servlet, then
     // filters just return 404.
     if (!skipDefaultServlet)
-      servletContextHandler
-        .addServlet(new ServletHolder("default", classOf[DefaultServlet]), "/")
+      servletContextHandler.addServlet(
+        new ServletHolder("default", classOf[DefaultServlet]),
+        "/")
 
   protected def ensureSessionIsSerializable() {
     servletContextHandler.getSessionHandler.addEventListener(
diff --git a/repos/scalaz/core/src/main/scala/scalaz/std/Either.scala b/repos/scalaz/core/src/main/scala/scalaz/std/Either.scala
index 5f73a5c2b08..3b57bd68330 100644
--- a/repos/scalaz/core/src/main/scala/scalaz/std/Either.scala
+++ b/repos/scalaz/core/src/main/scala/scalaz/std/Either.scala
@@ -244,13 +244,13 @@ trait EitherInstances extends EitherInstances0 {
 
   implicit def eitherFirstRightLInstance[L]
     : Monad[λ[α => RightProjection[L, α] @@ First]] =
-    Tags.First
-      .subst1[Monad, RightProjection[L, ?]](Monad[RightProjection[L, ?]])
+    Tags.First.subst1[Monad, RightProjection[L, ?]](
+      Monad[RightProjection[L, ?]])
 
   implicit def eitherLastRightLInstance[L]
     : Monad[λ[α => RightProjection[L, α] @@ Last]] =
-    Tags.Last
-      .subst1[Monad, RightProjection[L, ?]](Monad[RightProjection[L, ?]])
+    Tags.Last.subst1[Monad, RightProjection[L, ?]](
+      Monad[RightProjection[L, ?]])
 
   implicit def eitherLeftRInstance[R] =
     new Monad[LeftProjection[?, R]] {
diff --git a/repos/scalaz/tests/src/test/scala/scalaz/MapTest.scala b/repos/scalaz/tests/src/test/scala/scalaz/MapTest.scala
index 0d112694dd4..f71441d2547 100644
--- a/repos/scalaz/tests/src/test/scala/scalaz/MapTest.scala
+++ b/repos/scalaz/tests/src/test/scala/scalaz/MapTest.scala
@@ -566,8 +566,8 @@ object MapTest extends SpecLite {
   "==>> submap" should {
     "isSubmapOfBy -> true" in {
       val o = implicitly[Order[Int]]
-      fromList(List('a' -> 1))
-        .isSubmapOfBy(fromList(List('a' -> 1, 'b' -> 2)), o.equal) must_== true
+      fromList(List('a' -> 1)).isSubmapOfBy(fromList(List('a' -> 1, 'b' -> 2)),
+                                            o.equal) must_== true
       fromList(List('a' -> 1)).isSubmapOfBy(fromList(List('a' -> 1, 'b' -> 2)),
                                             o.lessThanOrEqual) must_== true
       fromList(List('a' -> 1, 'b' -> 2))
@@ -580,8 +580,8 @@ object MapTest extends SpecLite {
                                            o.equal) must_== false
       fromList(List('a' -> 1)).isSubmapOfBy(fromList(List('a' -> 1, 'b' -> 2)),
                                             o.lessThan) must_== false
-      fromList(List('a' -> 1, 'b' -> 2))
-        .isSubmapOfBy(fromList(List('a' -> 1)), o.equal) must_== false
+      fromList(List('a' -> 1, 'b' -> 2)).isSubmapOfBy(fromList(List('a' -> 1)),
+                                                      o.equal) must_== false
     }
 
     "isSubmapOf" ! forAll { (a: Byte ==>> Byte, b: Byte ==>> Byte) =>
diff --git a/repos/scalaz/tests/src/test/scala/scalaz/TreeTest.scala b/repos/scalaz/tests/src/test/scala/scalaz/TreeTest.scala
index 2e73c6ef5c5..015c3ace1f1 100644
--- a/repos/scalaz/tests/src/test/scala/scalaz/TreeTest.scala
+++ b/repos/scalaz/tests/src/test/scala/scalaz/TreeTest.scala
@@ -32,7 +32,8 @@ object TreeTest extends SpecLite {
   "A tree must can be rendered as an ASCII string" ! {
     Node(1, Stream(Node(2, Stream(Leaf(3))), Leaf(4))).drawTree must_==
-      Seq("1", "|", "+- 2", "| |", "| `- 3", "|", "`- 4")
-        .mkString("", "\n", "\n")
+      Seq("1", "|", "+- 2", "| |", "| `- 3", "|", "`- 4").mkString("",
+                                                                   "\n",
+                                                                   "\n")
   }
 }
diff --git a/repos/scalaz/tests/src/test/scala/scalaz/std/java/TimeTest.scala b/repos/scalaz/tests/src/test/scala/scalaz/std/java/TimeTest.scala
index ac281da22a3..54cf0fb9ec4 100644
--- a/repos/scalaz/tests/src/test/scala/scalaz/std/java/TimeTest.scala
+++ b/repos/scalaz/tests/src/test/scala/scalaz/std/java/TimeTest.scala
@@ -21,8 +21,9 @@ object TimeTest extends SpecLite {
       gen[Int].map { Duration.ofSeconds(_) }
     ))
 
-  implicit val PeriodArbitrary: Arbitrary[Period] = Apply[Arbitrary]
-    .apply3(smallIntArb, smallIntArb, smallIntArb)(Period.of(_, _, _))
+  implicit val PeriodArbitrary: Arbitrary[Period] =
+    Apply[Arbitrary].apply3(smallIntArb, smallIntArb, smallIntArb)(
+      Period.of(_, _, _))
 
   implicit val LocalDateArbitrary: Arbitrary[LocalDate] = Arbitrary(
     Apply[Gen].apply3(
diff --git a/repos/scalding/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/LzoGenericScheme.scala b/repos/scalding/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/LzoGenericScheme.scala
index 6beb581c284..0da34e80c62 100644
--- a/repos/scalding/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/LzoGenericScheme.scala
+++ b/repos/scalding/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/LzoGenericScheme.scala
@@ -154,8 +154,9 @@ class LzoGenericScheme[M](@transient conv: BinaryConverter[M], clazz: Class[M])
       classOf[SourceConfigBinaryConverterProvider[_]],
       conf)
 
-    DelegateCombineFileInputFormat
-      .setDelegateInputFormat(conf, classOf[MultiInputFormat[_]])
+    DelegateCombineFileInputFormat.setDelegateInputFormat(
+      conf,
+      classOf[MultiInputFormat[_]])
   }
 
   override def sinkConfInit(
@@ -170,7 +171,8 @@ class LzoGenericScheme[M](@transient conv: BinaryConverter[M], clazz: Class[M])
     LzoGenericBlockOutputFormat.setGenericConverterClassConf(
       classOf[SinkConfigBinaryConverterProvider[_]],
       conf)
-    DeprecatedOutputFormatWrapper
-      .setOutputFormat(classOf[LzoGenericBlockOutputFormat[_]], conf)
+    DeprecatedOutputFormatWrapper.setOutputFormat(
+      classOf[LzoGenericBlockOutputFormat[_]],
+      conf)
   }
 }
diff --git a/repos/scalding/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/VersionedKeyValSource.scala b/repos/scalding/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/VersionedKeyValSource.scala
index 614dd6d63eb..522dd15c42b 100644
--- a/repos/scalding/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/VersionedKeyValSource.scala
+++ b/repos/scalding/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/VersionedKeyValSource.scala
@@ -234,8 +234,10 @@ class VersionedKeyValSource[K, V](val path: String,
   }
 
   override def toString =
-    "%s path:%s,sourceVersion:%s,sinkVersion:%s"
-      .format(getClass(), path, sourceVersion, sinkVersion)
+    "%s path:%s,sourceVersion:%s,sinkVersion:%s".format(getClass(),
+                                                        path,
+                                                        sourceVersion,
+                                                        sinkVersion)
 
   override def equals(other: Any) =
     if (other.isInstanceOf[VersionedKeyValSource[_, _]]) {
diff --git a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/ExecutionApp.scala b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/ExecutionApp.scala
index 3cea328e043..604ea63889a 100644
--- a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/ExecutionApp.scala
+++ b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/ExecutionApp.scala
@@ -46,8 +46,10 @@ object ExecutionApp {
     // This adds a look back mechanism to match on other hadoop args we need to support
     // currently thats just libjars
-    val (hadoopArgs, tmpNonHadoop, finalLast) = argsWithLibJars
-      .foldLeft(Array[String](), Array[String](), Option.empty[String]) {
+    val (hadoopArgs, tmpNonHadoop, finalLast) =
+      argsWithLibJars.foldLeft(Array[String](),
+                               Array[String](),
+                               Option.empty[String]) {
       // Current is a -D, so store the last in non hadoop, and add current to hadoop args
       case ((hadoopArgs, nonHadoop, Some(l)), current)
           if dArgPattern.findFirstIn(current).isDefined =>
diff --git a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/IterableSource.scala b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/IterableSource.scala
index 13fcc6949d7..8f0e8b3e580 100644
--- a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/IterableSource.scala
+++ b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/IterableSource.scala
@@ -82,6 +82,7 @@ case class IterableSource[+T](@transient iter: Iterable[T],
    * Don't use the whole string of the iterable, which can be huge.
    * We take the first 10 items + the identityHashCode of the iter.
    */
-  override val sourceId: String = "IterableSource(%s)-%d"
-    .format(iter.take(10).toString, System.identityHashCode(iter))
+  override val sourceId: String = "IterableSource(%s)-%d".format(
+    iter.take(10).toString,
+    System.identityHashCode(iter))
 }
diff --git a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/JobTest.scala b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/JobTest.scala
index bd7a74182fd..9c7f49a073b 100644
--- a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/JobTest.scala
+++ b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/JobTest.scala
@@ -228,8 +228,7 @@ class JobTest(cons: (Args) => Job) {
     System.setProperty("cascading.update.skip", "true")
 
     // create cascading 3.0 planner trace files during tests
-    if (System.getenv.asScala
-          .getOrElse("SCALDING_CASCADING3_DEBUG", "0") == "1") {
+    if (System.getenv.asScala.getOrElse("SCALDING_CASCADING3_DEBUG", "0") == "1") {
       System.setProperty("cascading.planner.plan.path",
                          "target/test/cascading/traceplan/" + job.name)
       System.setProperty(
diff --git a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/Mode.scala b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/Mode.scala
index 0f71e46a1b6..a652150104c 100644
--- a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/Mode.scala
+++ b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/Mode.scala
@@ -100,13 +100,13 @@ object Mode {
       config.set(CascadingFlowProcessClassKey, DefaultHadoopFlowProcess)
       Hdfs(strictSources, config)
     } else if (args.boolean("hadoop2-mr1")) {
-      config
-        .set(CascadingFlowConnectorClassKey, DefaultHadoop2Mr1FlowConnector)
+      config.set(CascadingFlowConnectorClassKey,
+                 DefaultHadoop2Mr1FlowConnector)
       config.set(CascadingFlowProcessClassKey, DefaultHadoop2Mr1FlowProcess)
       Hdfs(strictSources, config)
     } else if (args.boolean("hadoop2-tez")) {
-      config
-        .set(CascadingFlowConnectorClassKey, DefaultHadoop2TezFlowConnector)
+      config.set(CascadingFlowConnectorClassKey,
+                 DefaultHadoop2TezFlowConnector)
       config.set(CascadingFlowProcessClassKey, DefaultHadoop2TezFlowProcess)
       Hdfs(strictSources, config)
     } else
@@ -181,8 +181,8 @@ trait HadoopMode extends Mode {
     // copy over Config
     config.toMap.foreach { case (k, v) => conf.set(k, v) }
 
-    val flowProcessClass = jobConf
-      .get(Mode.CascadingFlowProcessClassKey, Mode.DefaultHadoopFlowProcess)
+    val flowProcessClass = jobConf.get(Mode.CascadingFlowProcessClassKey,
+                                       Mode.DefaultHadoopFlowProcess)
 
     val fp = try {
       val clazz = Class.forName(flowProcessClass)
diff --git a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/macros/impl/FieldsProviderImpl.scala b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/macros/impl/FieldsProviderImpl.scala
index 97a1fe840b6..2ce304b5932 100644
--- a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/macros/impl/FieldsProviderImpl.scala
+++ b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/macros/impl/FieldsProviderImpl.scala
@@ -179,8 +179,9 @@ object FieldsProviderImpl {
         case m: MethodSymbol if m.isCaseAccessor => m
       }.map { accessorMethod =>
         val fieldName = accessorMethod.name.toTermName.toString
-        val fieldType = accessorMethod.returnType
-          .asSeenFrom(outerTpe, outerTpe.typeSymbol.asClass)
+        val fieldType =
+          accessorMethod.returnType.asSeenFrom(outerTpe,
+                                               outerTpe.typeSymbol.asClass)
         (fieldType, fieldName)
       }.toVector
diff --git a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/mathematics/TypedSimilarity.scala b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/mathematics/TypedSimilarity.scala
index bca27113264..cc2024bc067 100644
--- a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/mathematics/TypedSimilarity.scala
+++ b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/mathematics/TypedSimilarity.scala
@@ -303,8 +303,9 @@ class DiscoInCosine[N](
       (e.from, (e.to, e.data.degree))
     }.group.withReducers(reducers)
 
-    TypedSimilarity
-      .discoCosineSimilarity(smallGroupedOnSrc, bigGroupedOnSrc, oversample)
+    TypedSimilarity.discoCosineSimilarity(smallGroupedOnSrc,
+                                          bigGroupedOnSrc,
+                                          oversample)
   }
 }
 
@@ -334,7 +335,8 @@ class DimsumInCosine[N](
       (e.from, (e.to, e.data._1.weight, e.data._2.norm))
     }.group.withReducers(reducers)
 
-    TypedSimilarity
-      .dimsumCosineSimilarity(smallGroupedOnSrc, bigGroupedOnSrc, oversample)
+    TypedSimilarity.dimsumCosineSimilarity(smallGroupedOnSrc,
+                                           bigGroupedOnSrc,
+                                           oversample)
   }
 }
diff --git a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/typed/PartitionSchemed.scala b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/typed/PartitionSchemed.scala
index 13fd2a75fe5..9087aa86e0e 100644
--- a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/typed/PartitionSchemed.scala
+++ b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/typed/PartitionSchemed.scala
@@ -49,8 +49,8 @@ trait PartitionSchemed[P, T]
   // The partition fields, offset by the value arity.
   def partitionFields =
-    PartitionUtil
-      .toFields(valueSetter.arity, valueSetter.arity + partitionSetter.arity)
+    PartitionUtil.toFields(valueSetter.arity,
+                           valueSetter.arity + partitionSetter.arity)
 
   /*
    Advertise all the sinkFields, both the value and partition ones,
   this needs to be like this even
diff --git a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/typed/PartitionedTextLine.scala b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/typed/PartitionedTextLine.scala
index d2c94d267cd..1cefdf7fbe9 100644
--- a/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/typed/PartitionedTextLine.scala
+++ b/repos/scalding/scalding-core/src/main/scala/com/twitter/scalding/typed/PartitionedTextLine.scala
@@ -66,8 +66,9 @@ case class PartitionedTextLine[P](path: String,
     with java.io.Serializable {
 
   // The partition fields, offset by the value arity.
-  val partitionFields = PartitionUtil
-    .toFields(valueSetter.arity, valueSetter.arity + partitionSetter.arity)
+  val partitionFields = PartitionUtil.toFields(
+    valueSetter.arity,
+    valueSetter.arity + partitionSetter.arity)
 
   // Create the underlying scheme and explicitly set the sink fields to be only the specified fields
   // see sinkFields in PartitionSchemed for other half of this work around.
@@ -128,8 +129,8 @@ case class PartitionedTextLine[P](path: String,
    * into a pair of `P` and `(offset, line)`.
    */
   override def converter[U >: (P, (Long, String))] =
-    PartitionUtil
-      .converter[P, (Long, String), U](valueConverter, partitionConverter)
+    PartitionUtil.converter[P, (Long, String), U](valueConverter,
+                                                  partitionConverter)
 
   /** Flatten a pair of `P` and `line` into a cascading tuple.*/
   override def setter[U <: (P, String)] =
diff --git a/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/ExecutionTest.scala b/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/ExecutionTest.scala
index 0b2b4289e3f..09e965a9242 100644
--- a/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/ExecutionTest.scala
+++ b/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/ExecutionTest.scala
@@ -439,8 +439,9 @@ class ExecutionTest extends WordSpec with Matchers {
       writeAll(400).shouldSucceed()
     }
     "handle failure" in {
-      val result = Execution
-        .withParallelism(Seq(Execution.failed(new Exception("failed"))), 1)
+      val result = Execution.withParallelism(
+        Seq(Execution.failed(new Exception("failed"))),
+        1)
 
       assert(result.waitFor(Config.default, Local(true)).isFailure)
     }
diff --git a/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/KryoTest.scala b/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/KryoTest.scala
index 87244935cf7..8ffb8bc0502 100644
--- a/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/KryoTest.scala
+++ b/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/KryoTest.scala
@@ -55,8 +55,8 @@ class KryoTest extends WordSpec with Matchers {
   def getSerialization = {
     val conf = new Configuration
     val chillConf = new HadoopConfig(conf)
-    ConfiguredInstantiator
-      .setReflect(chillConf, classOf[serialization.KryoHadoop])
+    ConfiguredInstantiator.setReflect(chillConf,
+                                      classOf[serialization.KryoHadoop])
     new KryoSerialization(conf)
   }
diff --git a/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/StringUtilityTest.scala b/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/StringUtilityTest.scala
index c23fa4a232a..bdcb49ce93a 100644
--- a/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/StringUtilityTest.scala
+++ b/repos/scalding/scalding-core/src/test/scala/com/twitter/scalding/StringUtilityTest.scala
@@ -44,8 +44,8 @@ class StringUtilityTest extends WordSpec with Matchers {
 
 class StringUtilityPropertyTest extends PropSpec with Checkers {
   val randomStringGen = for {
-    s <- Gen
-      .pick(5, List.fill(100)(List("k", "l", "m", "x", "//.", "@")).flatten)
+    s <- Gen.pick(5,
+                  List.fill(100)(List("k", "l", "m", "x", "//.", "@")).flatten)
   } yield s
 
   // test for one separator and two
diff --git a/repos/scalding/scalding-parquet/src/main/scala/com/twitter/scalding/parquet/tuple/scheme/TypedParquetTupleScheme.scala b/repos/scalding/scalding-parquet/src/main/scala/com/twitter/scalding/parquet/tuple/scheme/TypedParquetTupleScheme.scala
index 4f2cce57dd1..b2a616b1c86 100644
--- a/repos/scalding/scalding-parquet/src/main/scala/com/twitter/scalding/parquet/tuple/scheme/TypedParquetTupleScheme.scala
+++ b/repos/scalding/scalding-parquet/src/main/scala/com/twitter/scalding/parquet/tuple/scheme/TypedParquetTupleScheme.scala
@@ -90,8 +90,10 @@ class ReadSupportInstanceProxy[T] extends ReadSupport[T] {
       keyValueMetaData: JMap[String, String],
       fileSchema: MessageType,
       readContext: ReadContext): RecordMaterializer[T] = {
-    getDelegateInstance(configuration)
-      .prepareForRead(configuration, keyValueMetaData, fileSchema, readContext)
+    getDelegateInstance(configuration).prepareForRead(configuration,
+                                                      keyValueMetaData,
+                                                      fileSchema,
+                                                      readContext)
  }
 }
 
@@ -182,8 +184,9 @@ class TypedParquetTupleScheme[T](val readSupport: ParquetReadSupport[T],
     jobConf.setInputFormat(classOf[DeprecatedParquetInputFormat[T]])
     jobConf.set(ParquetInputOutputFormat.READ_SUPPORT_INSTANCE,
                 ParquetInputOutputFormat.injection(readSupport))
-    ParquetInputFormat
-      .setReadSupportClass(jobConf, classOf[ReadSupportInstanceProxy[_]])
+    ParquetInputFormat.setReadSupportClass(
+      jobConf,
+      classOf[ReadSupportInstanceProxy[_]])
   }
 
   override def source(flowProcess: FlowProcess[JobConf],
diff --git a/repos/scalding/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/StringOrderedSerialization.scala b/repos/scalding/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/StringOrderedSerialization.scala
index d96114fd5b4..f5a593425ac 100644
--- a/repos/scalding/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/StringOrderedSerialization.scala
+++ b/repos/scalding/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/StringOrderedSerialization.scala
@@ -61,8 +61,8 @@ object StringOrderedSerialization {
       while ((counter > 0) && (ic == 0)) {
         // Unsigned compare of ints is cheaper than longs, because we can do it
         // by upcasting to Long
-        ic = UnsignedComparisons
-          .unsignedIntCompare(seekingLeft.readInt, seekingRight.readInt)
+        ic = UnsignedComparisons.unsignedIntCompare(seekingLeft.readInt,
+                                                    seekingRight.readInt)
         counter = counter - 1
       }
       if (ic != 0) ic
diff --git a/repos/scalding/scalding-serialization/src/test/scala/com/twitter/scalding/serialization/UnsignedComparisonLaws.scala b/repos/scalding/scalding-serialization/src/test/scala/com/twitter/scalding/serialization/UnsignedComparisonLaws.scala
index dce0c7eb20f..bcfaf7777bb 100644
--- a/repos/scalding/scalding-serialization/src/test/scala/com/twitter/scalding/serialization/UnsignedComparisonLaws.scala
+++ b/repos/scalding/scalding-serialization/src/test/scala/com/twitter/scalding/serialization/UnsignedComparisonLaws.scala
@@ -37,8 +37,8 @@ object UnsignedComparisonLaws extends Properties("UnsignedComparisonLaws") {
         case (false, true) => cmp > 0
         // Convert to positive ints
         case (false, false) =>
-          cmp == java.lang.Integer
-            .compare(l1 & Byte.MaxValue, l2 & Byte.MaxValue)
+          cmp == java.lang.Integer.compare(l1 & Byte.MaxValue,
+                                           l2 & Byte.MaxValue)
       }
     }
   }
diff --git a/repos/scalding/scalding-thrift-macros/src/main/scala/com/twitter/scalding/thrift/macros/impl/ordered_serialization/ScroogeOrderedBuf.scala b/repos/scalding/scalding-thrift-macros/src/main/scala/com/twitter/scalding/thrift/macros/impl/ordered_serialization/ScroogeOrderedBuf.scala
index c12eb97a26a..4229e01823a 100644
--- a/repos/scalding/scalding-thrift-macros/src/main/scala/com/twitter/scalding/thrift/macros/impl/ordered_serialization/ScroogeOrderedBuf.scala
+++ b/repos/scalding/scalding-thrift-macros/src/main/scala/com/twitter/scalding/thrift/macros/impl/ordered_serialization/ScroogeOrderedBuf.scala
@@ -72,8 +72,9 @@ object ScroogeOrderedBuf {
       .filter(m =>
         fieldNames.contains(m.name.toTermName.toString.toLowerCase))
       .map { accessorMethod =>
-        val fieldType = accessorMethod.returnType
-          .asSeenFrom(outerType, outerType.typeSymbol.asClass)
+        val fieldType =
+          accessorMethod.returnType.asSeenFrom(outerType,
+                                               outerType.typeSymbol.asClass)
         val b: TreeOrderedBuf[c.type] = dispatcher(fieldType)
         (fieldType, accessorMethod.name.toTermName, b)
       }
diff --git a/repos/scaloid/project/StringUtils.scala b/repos/scaloid/project/StringUtils.scala
index c5f0753b8ab..463c91405a1 100644
--- a/repos/scaloid/project/StringUtils.scala
+++ b/repos/scaloid/project/StringUtils.scala
@@ -3,8 +3,7 @@ object StringUtils {
     if (s.isEmpty) s else s(0).toLower + s.substring(1)
   def simpleName(s: String) = s.split('.').last
   def toJavaConst(s: String) =
-    (s.head +: "[A-Z]".r
-      .replaceAllIn(s.tail, m => "_" + m.group(0))).toUpperCase
+    (s.head +: "[A-Z]".r.replaceAllIn(s.tail, m => "_" + m.group(0))).toUpperCase
   def managerToService(s: String) = {
     val jc = toJavaConst(s.replace("DropBox", "Dropbox"))
     (if (jc.endsWith("MANAGER")) jc.split('_').init.mkString("_")
diff --git a/repos/slick/slick-testkit/src/codegen/scala/slick/test/codegen/GenerateRoundtripSources.scala b/repos/slick/slick-testkit/src/codegen/scala/slick/test/codegen/GenerateRoundtripSources.scala
index 43d3b3f8d3a..93ed5d9b594 100644
--- a/repos/slick/slick-testkit/src/codegen/scala/slick/test/codegen/GenerateRoundtripSources.scala
+++ b/repos/slick/slick-testkit/src/codegen/scala/slick/test/codegen/GenerateRoundtripSources.scala
@@ -40,8 +40,9 @@ object GenerateRoundtripSources {
       override def autoIncLastAsOption = true
     }
   })
-  val db = Database
-    .forURL(url = url, driver = jdbcDriver, keepAliveConnection = true)
+  val db = Database.forURL(url = url,
+                           driver = jdbcDriver,
+                           keepAliveConnection = true)
   val (gen, gen2) =
     try Await.result(db.run(ddl.create >> (a1 zip a2)), Duration.Inf)
     finally db.close
diff --git a/repos/slick/slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/ModelBuilderTest.scala b/repos/slick/slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/ModelBuilderTest.scala
index fe0a99cdc68..6db68bcb05e 100644
--- a/repos/slick/slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/ModelBuilderTest.scala
+++ b/repos/slick/slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/ModelBuilderTest.scala
@@ -228,8 +228,8 @@ class ModelBuilderTest extends AsyncTest[JdbcTestDB] {
   def test = ifCap(jcap.createModel) {
     def createModel(tables: Option[Seq[MTable]] = None,
                     ignoreInvalidDefaults: Boolean = true) =
-      tdb.profile
-        .createModel(tables.map(DBIO.successful), ignoreInvalidDefaults)
+      tdb.profile.createModel(tables.map(DBIO.successful),
+                              ignoreInvalidDefaults)
 
     // postgres uses lower case and things like int4
     // seen in jtds: int identity
diff --git a/repos/slick/slick-testkit/src/main/scala/com/typesafe/slick/testkit/util/SimpleParentRunner.scala b/repos/slick/slick-testkit/src/main/scala/com/typesafe/slick/testkit/util/SimpleParentRunner.scala
index c6e5b671f77..f4ced6c7a62 100644
--- a/repos/slick/slick-testkit/src/main/scala/com/typesafe/slick/testkit/util/SimpleParentRunner.scala
+++ b/repos/slick/slick-testkit/src/main/scala/com/typesafe/slick/testkit/util/SimpleParentRunner.scala
@@ -57,8 +57,8 @@ abstract class SimpleParentRunner[T](testClass: Class[_])
   }
 
   def getDescription = {
-    val desc = Description
-      .createSuiteDescription(testClass.getName, testClass.getAnnotations: _*)
+    val desc = Description.createSuiteDescription(testClass.getName,
+                                                  testClass.getAnnotations: _*)
     for (ch <- children) desc.addChild(describeChild(ch))
     desc
   }
diff --git a/repos/slick/slick-testkit/src/test/scala/slick/benchmark/StreamsStressTest.scala b/repos/slick/slick-testkit/src/test/scala/slick/benchmark/StreamsStressTest.scala
index cb472f23391..41520c4b6fa 100644
--- a/repos/slick/slick-testkit/src/test/scala/slick/benchmark/StreamsStressTest.scala
+++ b/repos/slick/slick-testkit/src/test/scala/slick/benchmark/StreamsStressTest.scala
@@ -52,8 +52,10 @@ object StreamsStressTest extends App {
   }
   val data = TableQuery[Data]
   val a =
-    data.schema.create >> (data ++= Range
-      .apply(0, elements.toInt)) >> data.sortBy(_.id).map(_.id).result
+    data.schema.create >> (data ++= Range.apply(0, elements.toInt)) >> data
+      .sortBy(_.id)
+      .map(_.id)
+      .result
   db.stream(a.withPinnedSession)
 }
diff --git a/repos/slick/slick-testkit/src/test/scala/slick/test/codegen/CodeGeneratorAllTest.scala b/repos/slick/slick-testkit/src/test/scala/slick/test/codegen/CodeGeneratorAllTest.scala
index f52948ea078..e5ffeaaec5e 100644
--- a/repos/slick/slick-testkit/src/test/scala/slick/test/codegen/CodeGeneratorAllTest.scala
+++ b/repos/slick/slick-testkit/src/test/scala/slick/test/codegen/CodeGeneratorAllTest.scala
@@ -78,8 +78,8 @@ class CodeGeneratorAllTest(val tdb: JdbcTestDB) extends DBTest {
     val profileName =
       tdb.profile.getClass.toString.dropRight(1).split("[\\. ]").last
-    val codegen = Await
-      .result(db.run((createA >> codegenA).withPinnedSession), Duration.Inf)
+    val codegen = Await.result(db.run((createA >> codegenA).withPinnedSession),
+                               Duration.Inf)
     codegen.writeToFile("slick.jdbc.H2Profile",
                        "target/slick-testkit-codegen-tests/",
                        "all.test",
diff --git a/repos/slick/slick/src/main/scala/slick/compiler/ExpandSums.scala b/repos/slick/slick/src/main/scala/slick/compiler/ExpandSums.scala
index a99279ba2d9..bb8f07e5c91 100644
--- a/repos/slick/slick/src/main/scala/slick/compiler/ExpandSums.scala
+++ b/repos/slick/slick/src/main/scala/slick/compiler/ExpandSums.scala
@@ -153,8 +153,8 @@ class ExpandSums extends Phase {
          pure) = bind
     val lComplex = !leftElemType.structural.isInstanceOf[AtomicType]
     val rComplex = !rightElemType.structural.isInstanceOf[AtomicType]
-    logger
-      .debug(s"Translating join ($jt, complex: $lComplex, $rComplex):", bind)
+    logger.debug(s"Translating join ($jt, complex: $lComplex, $rComplex):",
+                 bind)
 
     // Find an existing column that can serve as a discriminator
     def findDisc(t: Type): Option[List[TermSymbol]] = {
diff --git a/repos/slick/slick/src/main/scala/slick/compiler/HoistClientOps.scala b/repos/slick/slick/src/main/scala/slick/compiler/HoistClientOps.scala
index 96846f67f74..5d74e76b1a4 100644
--- a/repos/slick/slick/src/main/scala/slick/compiler/HoistClientOps.scala
+++ b/repos/slick/slick/src/main/scala/slick/compiler/HoistClientOps.scala
@@ -94,8 +94,8 @@ class HoistClientOps extends Phase {
               select = Pure(StructNode(ConstArray.from(newDefsM.map(_.swap)))))
             .infer()
-        logger
-          .debug("Translated left join side:", Ellipsis(bl2, List(0)))
+        logger.debug("Translated left join side:",
+                     Ellipsis(bl2, List(0)))
         val repl = hoisted.iterator.map {
           case (s, _, (n2, wrap)) => (s, (wrap, newDefsM(n2)))
         }.toMap
@@ -121,8 +121,8 @@ class HoistClientOps extends Phase {
               select = Pure(StructNode(ConstArray.from(newDefsM.map(_.swap)))))
             .infer()
-        logger
-          .debug("Translated right join side:", Ellipsis(br2, List(0)))
+        logger.debug("Translated right join side:",
+                     Ellipsis(br2, List(0)))
         val repl = hoisted.iterator.map {
           case (s, _, (n2, wrap)) => (s, (wrap, newDefsM(n2)))
         }.toMap
diff --git a/repos/slick/slick/src/main/scala/slick/compiler/MergeToComprehensions.scala b/repos/slick/slick/src/main/scala/slick/compiler/MergeToComprehensions.scala
index 8dda4703396..1d1970f4e59 100644
--- a/repos/slick/slick/src/main/scala/slick/compiler/MergeToComprehensions.scala
+++ b/repos/slick/slick/src/main/scala/slick/compiler/MergeToComprehensions.scala
@@ -49,8 +49,8 @@ class MergeToComprehensions extends Phase {
       n match {
         case Take(f1, count1) =>
           val (c1, replacements1) = mergeTakeDrop(f1, true)
-          logger
-            .debug("Merging Take into Comprehension:", Ellipsis(n, List(0)))
+          logger.debug("Merging Take into Comprehension:",
+                       Ellipsis(n, List(0)))
           val count2 = applyReplacements(count1, replacements1, c1)
           val fetch2 = c1.fetch match {
             case Some(t) => Some(constOp[Long]("min")(math.min)(t, count2))
@@ -62,8 +62,8 @@ class MergeToComprehensions extends Phase {
         case Drop(f1, count1) =>
           val (c1, replacements1) = mergeTakeDrop(f1, true)
-          logger
-            .debug("Merging Drop into Comprehension:", Ellipsis(n, List(0)))
+          logger.debug("Merging Drop into Comprehension:",
+                       Ellipsis(n, List(0)))
           val count2 = applyReplacements(count1, replacements1, c1)
           val (fetch2, offset2) = (c1.fetch, c1.offset) match {
             case (None, None) => (None, Some(count2))
@@ -103,8 +103,8 @@ class MergeToComprehensions extends Phase {
       n match {
         case SortBy(s1, f1, b1) =>
          val (c1, replacements1) = mergeSortBy(f1, true)
-          logger
-            .debug("Merging SortBy into Comprehension:", Ellipsis(n, List(0)))
+          logger.debug("Merging SortBy into Comprehension:",
+                       Ellipsis(n, List(0)))
          val b2 = b1.map {
            case (n, o) => (applyReplacements(n, replacements1, c1), o)
          }
@@ -337,8 +337,8 @@ class MergeToComprehensions extends Phase {
       case n :@ Type.Structural(CollectionType(cons, el)) =>
         convertOnlyInScalar(createTopLevel(n)._1)
       case a: Aggregate =>
-        logger
-          .debug("Merging Aggregate into Comprehension:", Ellipsis(a, List(0)))
+        logger.debug("Merging Aggregate into Comprehension:",
+                     Ellipsis(a, List(0)))
         val (c1, rep) = mergeFilterWhere(a.from, true)
         val sel2 = applyReplacements(a.select, rep, c1)
         val c2 = c1.copy(select = Pure(sel2)).infer()
@@ -414,8 +414,8 @@ class MergeToComprehensions extends Phase {
         val (c1a, replacements1a) =
           if (c1.distinct.isDefined) toSubquery(c1, replacements1)
           else (c1, replacements1)
-        logger
-          .debug("Merging Filter into Comprehension:", Ellipsis(n, List(0)))
+        logger.debug("Merging Filter into Comprehension:",
+                     Ellipsis(n, List(0)))
         val p2 = applyReplacements(p1, replacements1a, c1a)
         val c2 =
           if (c1a.groupBy.isEmpty)
diff --git a/repos/slick/slick/src/sphinx/code/CodeGenerator.scala b/repos/slick/slick/src/sphinx/code/CodeGenerator.scala
index 33a62fd3571..06017f41093 100644
--- a/repos/slick/slick/src/sphinx/code/CodeGenerator.scala
+++ b/repos/slick/slick/src/sphinx/code/CodeGenerator.scala
@@ -13,8 +13,8 @@ object CodeGenerator extends App {
   val user = ""
   val password = ""
   if (false) {
-    val db = Database
-      .forURL("jdbc:h2:mem:test1;DB_CLOSE_DELAY=-1", driver = "org.h2.Driver")
+    val db = Database.forURL("jdbc:h2:mem:test1;DB_CLOSE_DELAY=-1",
+                             driver = "org.h2.Driver")
     //#default-runner
     slick.codegen.SourceCodeGenerator.main(
       Array(profile, jdbcDriver, url, outputFolder, pkg)
diff --git a/repos/slick/slick/src/sphinx/code/Connection.scala b/repos/slick/slick/src/sphinx/code/Connection.scala
index 0836fbd8566..3a8aa1a66f5 100644
--- a/repos/slick/slick/src/sphinx/code/Connection.scala
+++ b/repos/slick/slick/src/sphinx/code/Connection.scala
@@ -48,8 +48,8 @@ object Connection extends App {
   };
   {
     //#forURL
-    val db = Database
-      .forURL("jdbc:h2:mem:test1;DB_CLOSE_DELAY=-1", driver = "org.h2.Driver")
+    val db = Database.forURL("jdbc:h2:mem:test1;DB_CLOSE_DELAY=-1",
+                             driver = "org.h2.Driver")
     //#forURL
     db.close
   };
diff --git a/repos/slick/slick/src/sphinx/code/JoinsUnions.scala b/repos/slick/slick/src/sphinx/code/JoinsUnions.scala
index 00b23cb06ea..924befbe563 100644
--- a/repos/slick/slick/src/sphinx/code/JoinsUnions.scala
+++ b/repos/slick/slick/src/sphinx/code/JoinsUnions.scala
@@ -105,8 +105,8 @@ object JoinsUnions extends App {
   } yield (c.name, s.name)
 
   val zipWithJoin = for {
-    res <- coffees
-      .zipWith(suppliers, (c: Coffees, s: Suppliers) => (c.name, s.name))
+    res <- coffees.zipWith(suppliers,
+                           (c: Coffees, s: Suppliers) => (c.name, s.name))
   } yield res
   //#zip
   //println(zipJoinQuery.result.statements.head)
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala b/repos/spark/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
index b9e1f2f7edb..3d962828135 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
@@ -93,8 +93,9 @@ private[spark] class ExecutorAllocationManager(
     conf.getInt("spark.dynamicAllocation.initialExecutors", minNumExecutors)
 
   // How long there must be backlogged tasks for before an addition is triggered (seconds)
-  private val schedulerBacklogTimeoutS = conf
-    .getTimeAsSeconds("spark.dynamicAllocation.schedulerBacklogTimeout", "1s")
+  private val schedulerBacklogTimeoutS = conf.getTimeAsSeconds(
+    "spark.dynamicAllocation.schedulerBacklogTimeout",
+    "1s")
 
   // Same as above, but used only after `schedulerBacklogTimeoutS` is exceeded
   private val sustainedSchedulerBacklogTimeoutS = conf.getTimeAsSeconds(
@@ -615,8 +616,9 @@ private[spark] class ExecutorAllocationManager(
           }
         }
       }
-      stageIdToExecutorPlacementHints
-        .put(stageId, (numTasksPending, hostToLocalTaskCountPerStage.toMap))
+      stageIdToExecutorPlacementHints.put(
+        stageId,
+        (numTasksPending, hostToLocalTaskCountPerStage.toMap))
 
       // Update the executor placement hints
       updateExecutorPlacementHints()
@@ -662,15 +664,15 @@ private[spark] class ExecutorAllocationManager(
       }
 
       // If this is the last pending task, mark the scheduler queue as empty
-      stageIdToTaskIndices
-        .getOrElseUpdate(stageId, new mutable.HashSet[Int]) += taskIndex
+      stageIdToTaskIndices.getOrElseUpdate(stageId, new mutable.HashSet[Int]) += taskIndex
       if (totalPendingTasks() == 0) {
         allocationManager.onSchedulerQueueEmpty()
       }
 
       // Mark the executor on which this task is scheduled as busy
-      executorIdToTaskIds
-        .getOrElseUpdate(executorId, new mutable.HashSet[Long]) += taskId
+      executorIdToTaskIds.getOrElseUpdate(
+        executorId,
+        new mutable.HashSet[Long]) += taskId
       allocationManager.onExecutorBusy(executorId)
     }
   }
@@ -788,12 +790,12 @@ private[spark] class ExecutorAllocationManager(
   private def registerGauge[T](name: String,
                                value: => T,
                                defaultValue: T): Unit = {
-    metricRegistry
-      .register(MetricRegistry.name("executors", name), new Gauge[T] {
-        override def getValue: T = synchronized {
-          Option(value).getOrElse(defaultValue)
-        }
-      })
+    metricRegistry.register(MetricRegistry.name("executors", name),
+                            new Gauge[T] {
+                              override def getValue: T = synchronized {
+                                Option(value).getOrElse(defaultValue)
+                              }
+                            })
   }
 
   registerGauge("numberExecutorsToAdd", numExecutorsToAdd, 0)
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/MapOutputTracker.scala b/repos/spark/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
index 17792c98abc..dd0be7d5c7b 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -172,8 +172,10 @@ private[spark] abstract class MapOutputTracker(conf: SparkConf)
     val statuses = getStatuses(shuffleId)
     // Synchronize on the returned array because, on the driver, it gets mutated in place
     statuses.synchronized {
-      return MapOutputTracker
-        .convertMapStatuses(shuffleId, startPartition, endPartition, statuses)
+      return MapOutputTracker.convertMapStatuses(shuffleId,
+                                                 startPartition,
+                                                 endPartition,
+                                                 statuses)
     }
   }
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/SSLOptions.scala b/repos/spark/core/src/main/scala/org/apache/spark/SSLOptions.scala
index 35bc7e5a916..a1b76e10ada 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/SSLOptions.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/SSLOptions.scala
@@ -167,8 +167,8 @@ private[spark] object SSLOptions extends Logging {
   def parse(conf: SparkConf,
             ns: String,
             defaults: Option[SSLOptions] = None): SSLOptions = {
-    val enabled = conf
-      .getBoolean(s"$ns.enabled", defaultValue = defaults.exists(_.enabled))
+    val enabled = conf.getBoolean(s"$ns.enabled",
+                                  defaultValue = defaults.exists(_.enabled))
 
     val keyStore = conf
       .getOption(s"$ns.keyStore")
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/SecurityManager.scala b/repos/spark/core/src/main/scala/org/apache/spark/SecurityManager.scala
index e2836b7af84..10102f954a0 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -297,8 +297,9 @@ private[spark] class SecurityManager(sparkConf: SparkConf)
   }
 
   def getSSLOptions(module: String): SSLOptions = {
-    val opts = SSLOptions
-      .parse(sparkConf, s"spark.ssl.$module", Some(defaultSSLOptions))
+    val opts = SSLOptions.parse(sparkConf,
+                                s"spark.ssl.$module",
+                                Some(defaultSSLOptions))
     logDebug(s"Created SSL options for $module: $opts")
     opts
   }
@@ -391,8 +392,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf)
         "generateSecretKey: yarn mode, secret key from credentials is null")
     val rnd = new SecureRandom()
     val length =
-      sparkConf
-        .getInt("spark.authenticate.secretBitLength", 256) / JByte.SIZE
+      sparkConf.getInt("spark.authenticate.secretBitLength", 256) / JByte.SIZE
     val secret = new Array[Byte](length)
     rnd.nextBytes(secret)
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/SparkContext.scala b/repos/spark/core/src/main/scala/org/apache/spark/SparkContext.scala
index ffe1d992d8f..c73ca93dac9 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2584,8 +2584,8 @@ object SparkContext extends Logging {
         val backend = try {
           val clazz = Utils.classForName(
             "org.apache.spark.scheduler.cluster.YarnClusterSchedulerBackend")
-          val cons = clazz
-            .getConstructor(classOf[TaskSchedulerImpl], classOf[SparkContext])
+          val cons = clazz.getConstructor(classOf[TaskSchedulerImpl],
+                                          classOf[SparkContext])
           cons
             .newInstance(scheduler, sc)
             .asInstanceOf[CoarseGrainedSchedulerBackend]
@@ -2612,8 +2612,8 @@ object SparkContext extends Logging {
         val backend = try {
           val clazz = Utils.classForName(
             "org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend")
-          val cons = clazz
-            .getConstructor(classOf[TaskSchedulerImpl], classOf[SparkContext])
+          val cons = clazz.getConstructor(classOf[TaskSchedulerImpl],
+                                          classOf[SparkContext])
           cons
             .newInstance(scheduler, sc)
             .asInstanceOf[CoarseGrainedSchedulerBackend]
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/SparkEnv.scala b/repos/spark/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 5bf29a92b38..82ece69e1ee 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -345,8 +345,9 @@ object SparkEnv extends Logging {
       "sort" -> "org.apache.spark.shuffle.sort.SortShuffleManager",
       "tungsten-sort" -> "org.apache.spark.shuffle.sort.SortShuffleManager")
     val shuffleMgrName = conf.get("spark.shuffle.manager", "sort")
-    val shuffleMgrClass = shortShuffleMgrNames
-      .getOrElse(shuffleMgrName.toLowerCase, shuffleMgrName)
+    val shuffleMgrClass = shortShuffleMgrNames.getOrElse(
+      shuffleMgrName.toLowerCase,
+      shuffleMgrName)
     val shuffleManager = instantiateClass[ShuffleManager](shuffleMgrClass)
 
     val useLegacyMemoryManager =
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala b/repos/spark/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
index 96baf0fb182..572ed03959c 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
@@ -91,8 +91,10 @@ private[spark] class SparkHadoopWriter(jobConf: JobConf)
     }
 
     getOutputCommitter().setupTask(getTaskContext())
-    writer = getOutputFormat()
-      .getRecordWriter(fs, conf.value, outputName, Reporter.NULL)
+    writer = getOutputFormat().getRecordWriter(fs,
+                                               conf.value,
+                                               outputName,
+                                               Reporter.NULL)
   }
 
   def write(key: AnyRef, value: AnyRef) {
@@ -108,8 +110,10 @@ private[spark] class SparkHadoopWriter(jobConf: JobConf)
   }
 
   def commit() {
-    SparkHadoopMapRedUtil
-      .commitTask(getOutputCommitter(), getTaskContext(), jobID, splitID)
+    SparkHadoopMapRedUtil.commitTask(getOutputCommitter(),
+                                     getTaskContext(),
+                                     jobID,
+                                     splitID)
   }
 
   def commitJob() {
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/repos/spark/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index 46667e3452c..e8a0504f867 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -448,8 +448,11 @@ class JavaSparkContext(val sc: SparkContext)
   ): JavaPairRDD[K, V] = {
     implicit val ctagK: ClassTag[K] = ClassTag(keyClass)
     implicit val ctagV: ClassTag[V] = ClassTag(valueClass)
-    val rdd = sc
-      .hadoopFile(path, inputFormatClass, keyClass, valueClass, minPartitions)
+    val rdd = sc.hadoopFile(path,
+                            inputFormatClass,
+                            keyClass,
+                            valueClass,
+                            minPartitions)
     new JavaHadoopRDD(rdd.asInstanceOf[HadoopRDD[K, V]])
   }
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/repos/spark/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index c4832db514f..8e47a521f0d 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -104,8 +104,7 @@ private[spark] class PythonRunner(func: PythonFunction,
     val localdir = env.blockManager.diskBlockManager.localDirs
       .map(f => f.getPath())
       .mkString(",")
-    envVars
-      .put("SPARK_LOCAL_DIRS", localdir) // it's also used in monitor thread
+    envVars.put("SPARK_LOCAL_DIRS", localdir) // it's also used in monitor thread
     if (reuse_worker) {
       envVars.put("SPARK_REUSE_WORKER", "1")
     }
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/deploy/ExecutorDescription.scala b/repos/spark/core/src/main/scala/org/apache/spark/deploy/ExecutorDescription.scala
index 205b72290e6..60ffbb4ca8e 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/deploy/ExecutorDescription.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/deploy/ExecutorDescription.scala
@@ -29,6 +29,8 @@ private[deploy] class ExecutorDescription(val appId: String,
     extends Serializable {
 
   override def toString: String =
-    "ExecutorState(appId=%s, execId=%d, cores=%d, state=%s)"
-      .format(appId, execId, cores, state)
+    "ExecutorState(appId=%s, execId=%d, cores=%d, state=%s)".format(appId,
+                                                                    execId,
+                                                                    cores,
+                                                                    state)
 }
diff --git a/repos/spark/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala b/repos/spark/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
index 4881a5f54f4..5b2d8b23e52 100644
--- a/repos/spark/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
+++ b/repos/spark/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
@@ -78,8 +78,7 @@ private object FaultToleranceTest extends App with Logging {
 
   private val
containerSparkHome = "/opt/spark" private val dockerMountDir = "%s:%s".format(sparkHome, containerSparkHome) - System - .setProperty("spark.driver.host", "172.17.42.1") // default docker host ip + System.setProperty("spark.driver.host", "172.17.42.1") // default docker host ip private def afterEach() { if (sc != null) { @@ -398,8 +397,10 @@ private class TestMasterInfo(val ip: String, def kill() { Docker.kill(dockerId) } override def toString: String = - "[ip=%s, id=%s, logFile=%s, state=%s]" - .format(ip, dockerId.id, logFile.getAbsolutePath, state) + "[ip=%s, id=%s, logFile=%s, state=%s]".format(ip, + dockerId.id, + logFile.getAbsolutePath, + state) } private class TestWorkerInfo(val ip: String, @@ -425,8 +426,9 @@ private object SparkDocker { } def startWorker(mountDir: String, masters: String): TestWorkerInfo = { - val cmd = Docker - .makeRunCmd("spark-test-worker", args = masters, mountDir = mountDir) + val cmd = Docker.makeRunCmd("spark-test-worker", + args = masters, + mountDir = mountDir) val (ip, id, outFile) = startNode(cmd) new TestWorkerInfo(ip, id, outFile) } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/repos/spark/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala index 05d400189c0..15e47cf0367 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala @@ -297,8 +297,9 @@ class SparkHadoopUtil extends Logging { credentials: Credentials): Long = { val now = System.currentTimeMillis() - val renewalInterval = sparkConf - .getLong("spark.yarn.token.renewal.interval", (24 hours).toMillis) + val renewalInterval = sparkConf.getLong( + "spark.yarn.token.renewal.interval", + (24 hours).toMillis) credentials.getAllTokens.asScala .filter(_.getKind == DelegationTokenIdentifier.HDFS_DELEGATION_KIND) @@ -332,8 +333,7 @@ class SparkHadoopUtil extends Logging { case HADOOP_CONF_PATTERN(matched) => { logDebug(text + " matched " + HADOOP_CONF_PATTERN) val key = - matched - .substring(13, matched.length() - 1) // remove ${hadoopconf- .. } + matched.substring(13, matched.length() - 1) // remove ${hadoopconf- .. } val eval = Option[String](hadoopConf.get(key)).map { value => logDebug("Substituted " + matched + " with " + value) text.replace(matched, value) diff --git a/repos/spark/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala b/repos/spark/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala index dfcea3e99a1..df36de693c8 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala @@ -310,8 +310,9 @@ private[history] class ApplicationCache( operations.attachSparkUI(appId, attemptId, ui, completed) } else { // incomplete UIs have the cache-check filter put in front of them. 
- ApplicationCacheCheckFilterRelay - .registerFilter(ui, appId, attemptId) + ApplicationCacheCheckFilterRelay.registerFilter(ui, + appId, + attemptId) operations.attachSparkUI(appId, attemptId, ui, completed) } // build the cache entry diff --git a/repos/spark/core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala b/repos/spark/core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala index fab8f3b6f43..3570de93913 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala @@ -24,8 +24,9 @@ import org.apache.spark.metrics.source.Source private[master] class ApplicationSource(val application: ApplicationInfo) extends Source { override val metricRegistry = new MetricRegistry() - override val sourceName = "%s.%s.%s" - .format("application", application.desc.name, System.currentTimeMillis()) + override val sourceName = "%s.%s.%s".format("application", + application.desc.name, + System.currentTimeMillis()) metricRegistry.register(MetricRegistry.name("status"), new Gauge[String] { override def getValue: String = application.state.toString diff --git a/repos/spark/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/repos/spark/core/src/main/scala/org/apache/spark/deploy/master/Master.scala index 09b5392f954..89b66619b40 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/deploy/master/Master.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/deploy/master/Master.scala @@ -1234,8 +1234,8 @@ private[deploy] class Master(override val rpcEnv: RpcEnv, } private def newDriverId(submitDate: Date): String = { - val appId = "driver-%s-%04d" - .format(createDateFormat.format(submitDate), nextDriverNumber) + val appId = "driver-%s-%04d".format(createDateFormat.format(submitDate), + nextDriverNumber) nextDriverNumber += 1 appId } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/deploy/mesos/ui/DriverPage.scala b/repos/spark/core/src/main/scala/org/apache/spark/deploy/mesos/ui/DriverPage.scala index 1890bb4ba6f..e9fdf561496 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/deploy/mesos/ui/DriverPage.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/deploy/mesos/ui/DriverPage.scala @@ -64,8 +64,9 @@ private[ui] class DriverPage(parent: MesosClusterUI) UIUtils.listingTable(commandHeaders, commandRow, command) val commandEnvTable = UIUtils.listingTable(commandEnvHeaders, propertiesRow, commandEnv) - val schedulerTable = UIUtils - .listingTable(schedulerHeaders, propertiesRow, schedulerProperties) + val schedulerTable = UIUtils.listingTable(schedulerHeaders, + propertiesRow, + schedulerProperties) val launchedTable = UIUtils.listingTable(launchedHeaders, launchedRow, submissionState) val retryTable = UIUtils.listingTable( diff --git a/repos/spark/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala b/repos/spark/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala index 23dc52b27b5..7410b31b96d 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala @@ -47,8 +47,8 @@ object DriverWrapper { 0, conf, new SecurityManager(conf)) - rpcEnv - .setupEndpoint("workerWatcher", new WorkerWatcher(rpcEnv, workerUrl)) + rpcEnv.setupEndpoint("workerWatcher", + new WorkerWatcher(rpcEnv, workerUrl)) val currentLoader = 
Thread.currentThread.getContextClassLoader val userJarUrl = new File(userJar).toURI().toURL() diff --git a/repos/spark/core/src/main/scala/org/apache/spark/executor/Executor.scala b/repos/spark/core/src/main/scala/org/apache/spark/executor/Executor.scala index cb02a93199c..01e19f3a085 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/executor/Executor.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/executor/Executor.scala @@ -337,18 +337,21 @@ private[spark] class Executor(executorId: String, } catch { case ffe: FetchFailedException => val reason = ffe.toTaskEndReason - execBackend - .statusUpdate(taskId, TaskState.FAILED, ser.serialize(reason)) + execBackend.statusUpdate(taskId, + TaskState.FAILED, + ser.serialize(reason)) case _: TaskKilledException | _: InterruptedException if task.killed => logInfo(s"Executor killed $taskName (TID $taskId)") - execBackend - .statusUpdate(taskId, TaskState.KILLED, ser.serialize(TaskKilled)) + execBackend.statusUpdate(taskId, + TaskState.KILLED, + ser.serialize(TaskKilled)) case cDE: CommitDeniedException => val reason = cDE.toTaskEndReason - execBackend - .statusUpdate(taskId, TaskState.FAILED, ser.serialize(reason)) + execBackend.statusUpdate(taskId, + TaskState.FAILED, + ser.serialize(reason)) case t: Throwable => // Attempt to exit cleanly by informing the driver of our failure. @@ -380,8 +383,9 @@ private[spark] class Executor(executorId: String, preserveCause = false)) } } - execBackend - .statusUpdate(taskId, TaskState.FAILED, serializedTaskEndReason) + execBackend.statusUpdate(taskId, + TaskState.FAILED, + serializedTaskEndReason) // Don't forcibly exit unless the exception was inherently fatal, to avoid // stopping other tasks unnecessarily. @@ -438,8 +442,11 @@ private[spark] class Executor(executorId: String, classOf[String], classOf[ClassLoader], classOf[Boolean]) - constructor - .newInstance(conf, env, classUri, parent, _userClassPathFirst) + constructor.newInstance(conf, + env, + classUri, + parent, + _userClassPathFirst) } catch { case _: ClassNotFoundException => logError( diff --git a/repos/spark/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala b/repos/spark/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala index c407eaf3c81..e734bf1f3af 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala @@ -37,8 +37,9 @@ private[spark] class ExecutorSource(threadPool: ThreadPoolExecutor, name: String, f: FileSystem.Statistics => T, defaultValue: T) = { - metricRegistry - .register(MetricRegistry.name("filesystem", scheme, name), new Gauge[T] { + metricRegistry.register( + MetricRegistry.name("filesystem", scheme, name), + new Gauge[T] { override def getValue: T = fileStats(scheme).map(f).getOrElse(defaultValue) }) diff --git a/repos/spark/core/src/main/scala/org/apache/spark/memory/UnifiedMemoryManager.scala b/repos/spark/core/src/main/scala/org/apache/spark/memory/UnifiedMemoryManager.scala index cea1eeb06e1..1afa04ce7ff 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/memory/UnifiedMemoryManager.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/memory/UnifiedMemoryManager.scala @@ -177,12 +177,12 @@ object UnifiedMemoryManager { def apply(conf: SparkConf, numCores: Int): UnifiedMemoryManager = { val maxMemory = getMaxMemory(conf) - new UnifiedMemoryManager( - conf, - maxMemory = maxMemory, - storageRegionSize = (maxMemory * conf - 
.getDouble("spark.memory.storageFraction", 0.5)).toLong, - numCores = numCores) + new UnifiedMemoryManager(conf, + maxMemory = maxMemory, + storageRegionSize = (maxMemory * conf.getDouble( + "spark.memory.storageFraction", + 0.5)).toLong, + numCores = numCores) } /** diff --git a/repos/spark/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala b/repos/spark/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala index 9dbf1c62b0b..6c8d2cb6cfb 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala @@ -74,8 +74,8 @@ private[spark] class GroupedMeanEvaluator[T](totalOutputs: Int, val confFactor = studentTCacher.get(counter.count) val low = mean - confFactor * stdev val high = mean + confFactor * stdev - result - .put(entry.getKey, new BoundedDouble(mean, confidence, low, high)) + result.put(entry.getKey, + new BoundedDouble(mean, confidence, low, high)) } result.asScala } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala b/repos/spark/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala index 89a8c164c1a..c1e17c55892 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala @@ -274,8 +274,7 @@ private class PartitionCoalescer(maxPartitions: Int, val pgroup = PartitionGroup(nxt_replica) groupArr += pgroup addPartToPGroup(nxt_part, pgroup) - groupHash - .put(nxt_replica, ArrayBuffer(pgroup)) // list in case we have multiple + groupHash.put(nxt_replica, ArrayBuffer(pgroup)) // list in case we have multiple numCreated += 1 } } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala b/repos/spark/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala index 8f0bb3af67a..41f4d04cc16 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala @@ -85,8 +85,10 @@ class DoubleRDDFunctions(self: RDD[Double]) extends Logging with Serializable { val processPartition = (ctx: TaskContext, ns: Iterator[Double]) => StatCounter(ns) val evaluator = new MeanEvaluator(self.partitions.length, confidence) - self.context - .runApproximateJob(self, processPartition, evaluator, timeout) + self.context.runApproximateJob(self, + processPartition, + evaluator, + timeout) } /** @@ -98,8 +100,10 @@ class DoubleRDDFunctions(self: RDD[Double]) extends Logging with Serializable { val processPartition = (ctx: TaskContext, ns: Iterator[Double]) => StatCounter(ns) val evaluator = new SumEvaluator(self.partitions.length, confidence) - self.context - .runApproximateJob(self, processPartition, evaluator, timeout) + self.context.runApproximateJob(self, + processPartition, + evaluator, + timeout) } /** diff --git a/repos/spark/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala b/repos/spark/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala index ac6a6d2c98e..ebdf4312de8 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala @@ -260,8 +260,9 @@ class HadoopRDD[K, V](sc: SparkContext, theSplit.index, context.attemptNumber, jobConf) - reader = inputFormat - .getRecordReader(split.inputSplit.value, jobConf, Reporter.NULL) + reader = 
inputFormat.getRecordReader(split.inputSplit.value, + jobConf, + Reporter.NULL) // Register an on-task-completion callback to close the input stream. context.addTaskCompletionListener { context => diff --git a/repos/spark/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala b/repos/spark/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala index 6640435f764..1e0f51f4f33 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala @@ -173,8 +173,8 @@ class NewHadoopRDD[K, V](sc: SparkContext, private var reader = format.createRecordReader( split.serializableHadoopSplit.value, hadoopAttemptContext) - reader - .initialize(split.serializableHadoopSplit.value, hadoopAttemptContext) + reader.initialize(split.serializableHadoopSplit.value, + hadoopAttemptContext) // Register an on-task-completion callback to close the input stream. context.addTaskCompletionListener(context => close()) diff --git a/repos/spark/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/repos/spark/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala index c7620c2ffb3..897da885ec9 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala @@ -304,11 +304,15 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])(implicit kt: ClassTag[K], val samplingFunc = if (withReplacement) { - StratifiedSamplingUtils - .getPoissonSamplingFunction(self, fractions, false, seed) + StratifiedSamplingUtils.getPoissonSamplingFunction(self, + fractions, + false, + seed) } else { - StratifiedSamplingUtils - .getBernoulliSamplingFunction(self, fractions, false, seed) + StratifiedSamplingUtils.getBernoulliSamplingFunction(self, + fractions, + false, + seed) } self.mapPartitionsWithIndex(samplingFunc, preservesPartitioning = true) } @@ -338,11 +342,15 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])(implicit kt: ClassTag[K], val samplingFunc = if (withReplacement) { - StratifiedSamplingUtils - .getPoissonSamplingFunction(self, fractions, true, seed) + StratifiedSamplingUtils.getPoissonSamplingFunction(self, + fractions, + true, + seed) } else { - StratifiedSamplingUtils - .getBernoulliSamplingFunction(self, fractions, true, seed) + StratifiedSamplingUtils.getBernoulliSamplingFunction(self, + fractions, + true, + seed) } self.mapPartitionsWithIndex(samplingFunc, preservesPartitioning = true) } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/repos/spark/core/src/main/scala/org/apache/spark/rdd/RDD.scala index 999a77d2b2c..4d6710c3e9b 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/rdd/RDD.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/rdd/RDD.scala @@ -576,8 +576,10 @@ abstract class RDD[T: ClassTag]( if (!withReplacement && num >= initialCount) { Utils.randomizeInPlace(this.collect(), rand) } else { - val fraction = SamplingUtils - .computeFractionForSampleSize(num, initialCount, withReplacement) + val fraction = + SamplingUtils.computeFractionForSampleSize(num, + initialCount, + withReplacement) var samples = this.sample(withReplacement, fraction, rand.nextInt()).collect() @@ -1749,24 +1751,26 @@ abstract class RDD[T: ClassTag]( * doCheckpoint() is called recursively on the parent RDDs. 
*/ private[spark] def doCheckpoint(): Unit = { - RDDOperationScope - .withScope(sc, "checkpoint", allowNesting = false, ignoreParent = true) { - if (!doCheckpointCalled) { - doCheckpointCalled = true - if (checkpointData.isDefined) { - if (checkpointAllMarkedAncestors) { - // TODO We can collect all the RDDs that needs to be checkpointed, and then checkpoint - // them in parallel. - // Checkpoint parents first because our lineage will be truncated after we - // checkpoint ourselves - dependencies.foreach(_.rdd.doCheckpoint()) - } - checkpointData.get.checkpoint() - } else { + RDDOperationScope.withScope(sc, + "checkpoint", + allowNesting = false, + ignoreParent = true) { + if (!doCheckpointCalled) { + doCheckpointCalled = true + if (checkpointData.isDefined) { + if (checkpointAllMarkedAncestors) { + // TODO We can collect all the RDDs that needs to be checkpointed, and then checkpoint + // them in parallel. + // Checkpoint parents first because our lineage will be truncated after we + // checkpoint ourselves dependencies.foreach(_.rdd.doCheckpoint()) } + checkpointData.get.checkpoint() + } else { + dependencies.foreach(_.rdd.doCheckpoint()) } } + } } /** diff --git a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala index ef95a1e9fcd..76a028dee91 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala @@ -385,8 +385,8 @@ private[spark] class DAGScheduler( // Kind of ugly: need to register RDDs with the cache and map output tracker here // since we can't do it in the RDD constructor because # of partitions is unknown logInfo("Registering RDD " + rdd.id + " (" + rdd.getCreationSite + ")") - mapOutputTracker - .registerShuffle(shuffleDep.shuffleId, rdd.partitions.length) + mapOutputTracker.registerShuffle(shuffleDep.shuffleId, + rdd.partitions.length) } stage } @@ -1056,8 +1056,9 @@ private[spark] class DAGScheduler( // event. 
stage match { case s: ShuffleMapStage => - outputCommitCoordinator - .stageStart(stage = s.id, maxPartitionId = s.numPartitions - 1) + outputCommitCoordinator.stageStart( + stage = s.id, + maxPartitionId = s.numPartitions - 1) case s: ResultStage => outputCommitCoordinator.stageStart( stage = s.id, diff --git a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala index 19a87bc200f..ea80aada68a 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala @@ -26,30 +26,35 @@ private[scheduler] class DAGSchedulerSource(val dagScheduler: DAGScheduler) override val metricRegistry = new MetricRegistry() override val sourceName = "DAGScheduler" - metricRegistry - .register(MetricRegistry.name("stage", "failedStages"), new Gauge[Int] { - override def getValue: Int = dagScheduler.failedStages.size - }) - - metricRegistry - .register(MetricRegistry.name("stage", "runningStages"), new Gauge[Int] { - override def getValue: Int = dagScheduler.runningStages.size - }) - - metricRegistry - .register(MetricRegistry.name("stage", "waitingStages"), new Gauge[Int] { - override def getValue: Int = dagScheduler.waitingStages.size - }) - - metricRegistry - .register(MetricRegistry.name("job", "allJobs"), new Gauge[Int] { - override def getValue: Int = dagScheduler.numTotalJobs - }) - - metricRegistry - .register(MetricRegistry.name("job", "activeJobs"), new Gauge[Int] { - override def getValue: Int = dagScheduler.activeJobs.size - }) + metricRegistry.register(MetricRegistry.name("stage", "failedStages"), + new Gauge[Int] { + override def getValue: Int = + dagScheduler.failedStages.size + }) + + metricRegistry.register(MetricRegistry.name("stage", "runningStages"), + new Gauge[Int] { + override def getValue: Int = + dagScheduler.runningStages.size + }) + + metricRegistry.register(MetricRegistry.name("stage", "waitingStages"), + new Gauge[Int] { + override def getValue: Int = + dagScheduler.waitingStages.size + }) + + metricRegistry.register(MetricRegistry.name("job", "allJobs"), + new Gauge[Int] { + override def getValue: Int = + dagScheduler.numTotalJobs + }) + + metricRegistry.register(MetricRegistry.name("job", "activeJobs"), + new Gauge[Int] { + override def getValue: Int = + dagScheduler.activeJobs.size + }) /** Timer that tracks the time to process messages in the DAGScheduler's event loop */ val messageProcessingTimer: Timer = diff --git a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala index 4be7455b204..e87af43fb8d 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala @@ -174,8 +174,9 @@ private[spark] class TaskSchedulerImpl(val sc: SparkContext, this.synchronized { val manager = createTaskSetManager(taskSet, maxTaskFailures) val stage = taskSet.stageId - val stageTaskSets = taskSetsByStageIdAndAttempt - .getOrElseUpdate(stage, new HashMap[Int, TaskSetManager]) + val stageTaskSets = taskSetsByStageIdAndAttempt.getOrElseUpdate( + stage, + new HashMap[Int, TaskSetManager]) stageTaskSets(taskSet.stageAttemptId) = manager val conflictingTaskSet = stageTaskSets.exists { case (_, ts) => @@ -377,13 +378,16 @@ private[spark] 
class TaskSchedulerImpl(val sc: SparkContext, } if (state == TaskState.FINISHED) { taskSet.removeRunningTask(tid) - taskResultGetter - .enqueueSuccessfulTask(taskSet, tid, serializedData) + taskResultGetter.enqueueSuccessfulTask(taskSet, + tid, + serializedData) } else if (Set(TaskState.FAILED, TaskState.KILLED, TaskState.LOST) .contains(state)) { taskSet.removeRunningTask(tid) - taskResultGetter - .enqueueFailedTask(taskSet, tid, state, serializedData) + taskResultGetter.enqueueFailedTask(taskSet, + tid, + state, + serializedData) } case None => logError( diff --git a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala index 364b9a8e58d..f9cc867fe90 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala @@ -198,8 +198,7 @@ private[spark] class TaskSetManager(sched: TaskSchedulerImpl, exe match { case Some(set) => { for (e <- set) { - pendingTasksForExecutor - .getOrElseUpdate(e, new ArrayBuffer) += index + pendingTasksForExecutor.getOrElseUpdate(e, new ArrayBuffer) += index } logInfo( s"Pending task $index has a cached location at ${e.host} " + @@ -775,8 +774,11 @@ private[spark] class TaskSetManager(sched: TaskSchedulerImpl, failedExecutors .getOrElseUpdate(index, new HashMap[String, Long]()) .put(info.executorId, clock.getTimeMillis()) - sched.dagScheduler - .taskEnded(tasks(index), reason, null, accumUpdates, info) + sched.dagScheduler.taskEnded(tasks(index), + reason, + null, + accumUpdates, + info) addPendingTask(index) if (!isZombie && state != TaskState.KILLED && reason.isInstanceOf[TaskFailedReason] && diff --git a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala index 3f903df20cf..ed405660add 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala @@ -57,12 +57,14 @@ private[spark] class CoarseGrainedSchedulerBackend( private val maxRpcMessageSize = RpcUtils.maxMessageSizeBytes(conf) // Submit tasks only after (registered resources / total expected resources) // is equal to at least this value, that is double between 0 and 1. 
- var minRegisteredRatio = math - .min(1, conf.getDouble("spark.scheduler.minRegisteredResourcesRatio", 0)) + var minRegisteredRatio = math.min( + 1, + conf.getDouble("spark.scheduler.minRegisteredResourcesRatio", 0)) // Submit tasks after maxRegisteredWaitingTime milliseconds // if minRegisteredRatio has not yet been reached - val maxRegisteredWaitingTimeMs = conf - .getTimeAsMs("spark.scheduler.maxRegisteredResourcesWaitingTime", "30s") + val maxRegisteredWaitingTimeMs = conf.getTimeAsMs( + "spark.scheduler.maxRegisteredResourcesWaitingTime", + "30s") val createTime = System.currentTimeMillis() private val executorDataMap = new HashMap[String, ExecutorData] @@ -309,8 +311,8 @@ private[spark] class CoarseGrainedSchedulerBackend( } totalCoreCount.addAndGet(-executorInfo.totalCores) totalRegisteredExecutors.addAndGet(-1) - scheduler - .executorLost(executorId, if (killed) ExecutorKilled else reason) + scheduler.executorLost(executorId, + if (killed) ExecutorKilled else reason) listenerBus.post( SparkListenerExecutorRemoved(System.currentTimeMillis(), executorId, diff --git a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala index ce1d15c0db8..052b0dc0e83 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala @@ -126,8 +126,10 @@ private[spark] class MesosClusterScheduler( extends Scheduler with MesosSchedulerUtils { var frameworkUrl: String = _ - private val metricsSystem = MetricsSystem - .createMetricsSystem("mesos_cluster", conf, new SecurityManager(conf)) + private val metricsSystem = MetricsSystem.createMetricsSystem( + "mesos_cluster", + conf, + new SecurityManager(conf)) private val master = conf.get("spark.master") private val appName = conf.get("spark.app.name") private val queuedCapacity = conf.getInt("spark.mesos.maxDrivers", 200) diff --git a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSource.scala b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSource.scala index 8c4e2e5e32c..6d88b8d6869 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSource.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSource.scala @@ -27,15 +27,17 @@ private[mesos] class MesosClusterSchedulerSource( override def sourceName: String = "mesos_cluster" override def metricRegistry: MetricRegistry = new MetricRegistry() - metricRegistry - .register(MetricRegistry.name("waitingDrivers"), new Gauge[Int] { - override def getValue: Int = scheduler.getQueuedDriversSize - }) + metricRegistry.register(MetricRegistry.name("waitingDrivers"), + new Gauge[Int] { + override def getValue: Int = + scheduler.getQueuedDriversSize + }) - metricRegistry - .register(MetricRegistry.name("launchedDrivers"), new Gauge[Int] { - override def getValue: Int = scheduler.getLaunchedDriversSize - }) + metricRegistry.register(MetricRegistry.name("launchedDrivers"), + new Gauge[Int] { + override def getValue: Int = + scheduler.getLaunchedDriversSize + }) metricRegistry.register(MetricRegistry.name("retryDrivers"), new Gauge[Int] { override def getValue: Int = scheduler.getPendingRetryDriversSize diff --git 
a/repos/spark/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala b/repos/spark/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala index 92de4478510..f711ad7bdb8 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala @@ -130,8 +130,8 @@ class KryoSerializer(conf: SparkConf) kryo.register(classOf[SerializableJobConf], new KryoJavaSerializer()) kryo.register(classOf[PythonBroadcast], new KryoJavaSerializer()) - kryo - .register(classOf[GenericRecord], new GenericAvroSerializer(avroSchemas)) + kryo.register(classOf[GenericRecord], + new GenericAvroSerializer(avroSchemas)) kryo.register(classOf[GenericData.Record], new GenericAvroSerializer(avroSchemas)) diff --git a/repos/spark/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala b/repos/spark/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala index b4ffc39519c..7c0387b376c 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala @@ -84,8 +84,10 @@ private[spark] class SortShuffleWriter[K, V, C]( mapId, IndexShuffleBlockResolver.NOOP_REDUCE_ID) val partitionLengths = sorter.writePartitionedFile(blockId, tmp) - shuffleBlockResolver - .writeIndexFileAndCommit(dep.shuffleId, mapId, partitionLengths, tmp) + shuffleBlockResolver.writeIndexFileAndCommit(dep.shuffleId, + mapId, + partitionLengths, + tmp) mapStatus = MapStatus(blockManager.shuffleServerId, partitionLengths) } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala b/repos/spark/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala index 93cfbbfdf0f..7f841045af3 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala @@ -375,8 +375,10 @@ private[spark] class BlockManagerMasterEndpoint(override val rpcEnv: RpcEnv, return true } - blockManagerInfo(blockManagerId) - .updateBlockInfo(blockId, storageLevel, memSize, diskSize) + blockManagerInfo(blockManagerId).updateBlockInfo(blockId, + storageLevel, + memSize, + diskSize) var locations: mutable.HashSet[BlockManagerId] = null if (blockLocations.containsKey(blockId)) { diff --git a/repos/spark/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala b/repos/spark/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala index ac1d7fbf4da..4cac32a8aac 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala @@ -26,14 +26,15 @@ private[spark] class BlockManagerSource(val blockManager: BlockManager) override val metricRegistry = new MetricRegistry() override val sourceName = "BlockManager" - metricRegistry - .register(MetricRegistry.name("memory", "maxMem_MB"), new Gauge[Long] { - override def getValue: Long = { - val storageStatusList = blockManager.master.getStorageStatus - val maxMem = storageStatusList.map(_.maxMem).sum - maxMem / 1024 / 1024 - } - }) + metricRegistry.register(MetricRegistry.name("memory", "maxMem_MB"), + new Gauge[Long] { + override def getValue: Long = { + val storageStatusList = + blockManager.master.getStorageStatus + val maxMem 
= storageStatusList.map(_.maxMem).sum + maxMem / 1024 / 1024 + } + }) metricRegistry.register( MetricRegistry.name("memory", "remainingMem_MB"), @@ -45,8 +46,9 @@ private[spark] class BlockManagerSource(val blockManager: BlockManager) } }) - metricRegistry - .register(MetricRegistry.name("memory", "memUsed_MB"), new Gauge[Long] { + metricRegistry.register( + MetricRegistry.name("memory", "memUsed_MB"), + new Gauge[Long] { override def getValue: Long = { val storageStatusList = blockManager.master.getStorageStatus val memUsed = storageStatusList.map(_.memUsed).sum diff --git a/repos/spark/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala b/repos/spark/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala index 1edcc3a6fde..7882b8f7b95 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala @@ -173,45 +173,43 @@ private[spark] final class ShuffleBlockFetcherIterator( val blockIds = req.blocks.map(_._1.toString) val address = req.address - shuffleClient - .fetchBlocks( - address.host, - address.port, - address.executorId, - blockIds.toArray, - new BlockFetchingListener { - override def onBlockFetchSuccess(blockId: String, - buf: ManagedBuffer): Unit = { - // Only add the buffer to results queue if the iterator is not zombie, - // i.e. cleanup() has not been called yet. - ShuffleBlockFetcherIterator.this.synchronized { - if (!isZombie) { - // Increment the ref count because we need to pass this to a different thread. - // This needs to be released after use. - buf.retain() - remainingBlocks -= blockId - results.put( - new SuccessFetchResult(BlockId(blockId), - address, - sizeMap(blockId), - buf, - remainingBlocks.isEmpty)) - logDebug("remainingBlocks: " + remainingBlocks) - } + shuffleClient.fetchBlocks( + address.host, + address.port, + address.executorId, + blockIds.toArray, + new BlockFetchingListener { + override def onBlockFetchSuccess(blockId: String, + buf: ManagedBuffer): Unit = { + // Only add the buffer to results queue if the iterator is not zombie, + // i.e. cleanup() has not been called yet. + ShuffleBlockFetcherIterator.this.synchronized { + if (!isZombie) { + // Increment the ref count because we need to pass this to a different thread. + // This needs to be released after use. 
+ buf.retain() + remainingBlocks -= blockId + results.put( + new SuccessFetchResult(BlockId(blockId), + address, + sizeMap(blockId), + buf, + remainingBlocks.isEmpty)) + logDebug("remainingBlocks: " + remainingBlocks) } - logTrace( - "Got remote block " + blockId + " after " + - Utils.getUsedTimeMs(startTime)) } + logTrace( + "Got remote block " + blockId + " after " + + Utils.getUsedTimeMs(startTime)) + } - override def onBlockFetchFailure(blockId: String, - e: Throwable): Unit = { - logError( - s"Failed to get block(s) from ${req.address.host}:${req.address.port}", - e) - results.put(new FailureFetchResult(BlockId(blockId), address, e)) - } - }) + override def onBlockFetchFailure(blockId: String, e: Throwable): Unit = { + logError( + s"Failed to get block(s) from ${req.address.host}:${req.address.port}", + e) + results.put(new FailureFetchResult(BlockId(blockId), address, e)) + } + }) } private[this] def splitLocalRemoteBlocks(): ArrayBuffer[FetchRequest] = { diff --git a/repos/spark/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala b/repos/spark/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala index f24ff395b77..bbb3bccb067 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala @@ -222,8 +222,9 @@ private[spark] class MemoryStore(conf: SparkConf, // Acquire storage memory if necessary to store this block in memory. val enoughStorageMemory = { if (unrollMemoryUsedByThisBlock <= size) { - val acquiredExtra = memoryManager - .acquireStorageMemory(blockId, size - unrollMemoryUsedByThisBlock) + val acquiredExtra = memoryManager.acquireStorageMemory( + blockId, + size - unrollMemoryUsedByThisBlock) if (acquiredExtra) { transferUnrollToStorage(unrollMemoryUsedByThisBlock) } @@ -425,8 +426,9 @@ private[spark] class MemoryStore(conf: SparkConf, val success = memoryManager.acquireUnrollMemory(blockId, memory) if (success) { val taskAttemptId = currentTaskAttemptId() - unrollMemoryMap(taskAttemptId) = unrollMemoryMap - .getOrElse(taskAttemptId, 0L) + memory + unrollMemoryMap(taskAttemptId) = unrollMemoryMap.getOrElse( + taskAttemptId, + 0L) + memory } success } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/repos/spark/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala index 085d1340954..72dfa35b940 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala @@ -109,8 +109,8 @@ private[spark] object JettyUtils extends Logging { } } catch { case e: IllegalArgumentException => - response - .sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage) + response.sendError(HttpServletResponse.SC_BAD_REQUEST, + e.getMessage) case e: Exception => logWarning(s"GET ${request.getRequestURI} failed: $e", e) throw e @@ -199,8 +199,8 @@ private[spark] object JettyUtils extends Logging { def createStaticHandler(resourceBase: String, path: String): ServletContextHandler = { val contextHandler = new ServletContextHandler - contextHandler - .setInitParameter("org.eclipse.jetty.servlet.Default.gzip", "false") + contextHandler.setInitParameter("org.eclipse.jetty.servlet.Default.gzip", + "false") val staticHandler = new DefaultServlet val holder = new ServletHolder(staticHandler) Option(Utils.getSparkClassLoader.getResource(resourceBase)) match { diff --git 
a/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala b/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala index 77747f97c90..86bf99eacfc 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala @@ -188,8 +188,9 @@ private[ui] class JobPage(parent: JobsTab) extends WebUIPage("job") { val content =
<div id="no-info">
  <p>No information to display for job {jobId}</p>
</div>
- return UIUtils - .headerSparkPage(s"Details for Job $jobId", content, parent) + return UIUtils.headerSparkPage(s"Details for Job $jobId", + content, + parent) } val jobData = jobDataOption.get val isComplete = jobData.status != JobExecutionStatus.RUNNING diff --git a/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala b/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala index cdeadb26662..b83d8e3163f 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala @@ -312,8 +312,9 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging { }.getOrElse(SparkUI.DEFAULT_POOL_NAME) stageIdToInfo(stage.stageId) = stage - val stageData = stageIdToData - .getOrElseUpdate((stage.stageId, stage.attemptId), new StageUIData) + val stageData = + stageIdToData.getOrElseUpdate((stage.stageId, stage.attemptId), + new StageUIData) stageData.schedulingPool = poolName stageData.description = Option(stageSubmitted.properties).flatMap { p => @@ -340,14 +341,14 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging { val taskInfo = taskStart.taskInfo if (taskInfo != null) { val metrics = new TaskMetrics - val stageData = stageIdToData - .getOrElseUpdate((taskStart.stageId, taskStart.stageAttemptId), { + val stageData = stageIdToData.getOrElseUpdate( + (taskStart.stageId, taskStart.stageAttemptId), { logWarning("Task start for unknown stage " + taskStart.stageId) new StageUIData }) stageData.numActiveTasks += 1 - stageData.taskData - .put(taskInfo.taskId, new TaskUIData(taskInfo, Some(metrics))) + stageData.taskData.put(taskInfo.taskId, + new TaskUIData(taskInfo, Some(metrics))) } for (activeJobsDependentOnStage <- stageIdToActiveJobIds.get( taskStart.stageId); @@ -369,8 +370,8 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging { // completion event is for. Let's just drop it here. This means we might have some speculation // tasks on the web ui that's never marked as complete. if (info != null && taskEnd.stageAttemptId != -1) { - val stageData = stageIdToData - .getOrElseUpdate((taskEnd.stageId, taskEnd.stageAttemptId), { + val stageData = stageIdToData.getOrElseUpdate( + (taskEnd.stageId, taskEnd.stageAttemptId), { logWarning("Task end for unknown stage " + taskEnd.stageId) new StageUIData }) diff --git a/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala b/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala index 6362c09847b..738dac52f39 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala @@ -60,8 +60,9 @@ private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") { val content =
<h4>Summary </h4> ++ poolTable.toNodeSeq ++
<h4>{activeStages.size} Active Stages</h4>
++ activeStagesTable.toNodeSeq - UIUtils - .headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent) + UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, + content, + parent) } } } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/util/CollectionsUtils.scala b/repos/spark/core/src/main/scala/org/apache/spark/util/CollectionsUtils.scala index cfba77a70e6..404dee5f37d 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/util/CollectionsUtils.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/util/CollectionsUtils.scala @@ -57,8 +57,9 @@ private[spark] object CollectionsUtils { val comparator = implicitly[Ordering[K]].asInstanceOf[java.util.Comparator[Any]] (l, x) => - util.Arrays - .binarySearch(l.asInstanceOf[Array[AnyRef]], x, comparator) + util.Arrays.binarySearch(l.asInstanceOf[Array[AnyRef]], + x, + comparator) } } } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala b/repos/spark/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala index bcfd6681c50..edc48134b08 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala @@ -147,12 +147,14 @@ object SizeEstimator extends Logging { // scalastyle:off classforname val hotSpotMBeanClass = Class.forName("com.sun.management.HotSpotDiagnosticMXBean") - val getVMMethod = hotSpotMBeanClass - .getDeclaredMethod("getVMOption", Class.forName("java.lang.String")) + val getVMMethod = hotSpotMBeanClass.getDeclaredMethod( + "getVMOption", + Class.forName("java.lang.String")) // scalastyle:on classforname - val bean = ManagementFactory - .newPlatformMXBeanProxy(server, hotSpotMBeanName, hotSpotMBeanClass) + val bean = ManagementFactory.newPlatformMXBeanProxy(server, + hotSpotMBeanName, + hotSpotMBeanClass) // TODO: We could use reflection on the VMOption returned ? 
getVMMethod.invoke(bean, "UseCompressedOops").toString.contains("true") } catch { diff --git a/repos/spark/core/src/main/scala/org/apache/spark/util/StatCounter.scala b/repos/spark/core/src/main/scala/org/apache/spark/util/StatCounter.scala index 81d0de72aa9..4d0e692155d 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/util/StatCounter.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/util/StatCounter.scala @@ -138,8 +138,11 @@ class StatCounter(values: TraversableOnce[Double]) extends Serializable { def sampleStdev: Double = math.sqrt(sampleVariance) override def toString: String = { - "(count: %d, mean: %f, stdev: %f, max: %f, min: %f)" - .format(count, mean, stdev, max, min) + "(count: %d, mean: %f, stdev: %f, max: %f, min: %f)".format(count, + mean, + stdev, + max, + min) } } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala b/repos/spark/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala index 612b9bebb96..a5e48e05593 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala @@ -47,8 +47,9 @@ private[spark] class TimeStampedHashMap[A, B]( def get(key: A): Option[B] = { val value = internalMap.get(key) if (value != null && updateTimeStampOnGet) { - internalMap - .replace(key, value, TimeStampedValue(value.value, currentTime)) + internalMap.replace(key, + value, + TimeStampedValue(value.value, currentTime)) } Option(value).map(_.value) } diff --git a/repos/spark/core/src/main/scala/org/apache/spark/util/Utils.scala b/repos/spark/core/src/main/scala/org/apache/spark/util/Utils.scala index ae770aac88c..c273595cfef 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/util/Utils.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/util/Utils.scala @@ -2232,8 +2232,10 @@ private[spark] object Utils extends Logging { } else { "$" + libraryPathEnvName } - val libraryPath = (libraryPaths :+ libraryPathScriptVar) - .mkString("\"", File.pathSeparator, "\"") + val libraryPath = (libraryPaths :+ libraryPathScriptVar).mkString( + "\"", + File.pathSeparator, + "\"") val ampersand = if (Utils.isWindows) { " &" diff --git a/repos/spark/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/repos/spark/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala index 38dea4ea97e..385e084101a 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala @@ -190,8 +190,11 @@ class ExternalAppendOnlyMap[K, V, C]( override protected[this] def spill(collection: SizeTracker): Unit = { val (blockId, file) = diskBlockManager.createTempLocalBlock() curWriteMetrics = new ShuffleWriteMetrics() - var writer = blockManager - .getDiskWriter(blockId, file, ser, fileBufferSize, curWriteMetrics) + var writer = blockManager.getDiskWriter(blockId, + file, + ser, + fileBufferSize, + curWriteMetrics) var objectsWritten = 0 // List of batch sizes (bytes) in the order they are written to disk @@ -218,8 +221,11 @@ class ExternalAppendOnlyMap[K, V, C]( if (objectsWritten == serializerBatchSize) { flush() curWriteMetrics = new ShuffleWriteMetrics() - writer = blockManager - .getDiskWriter(blockId, file, ser, fileBufferSize, curWriteMetrics) + writer = blockManager.getDiskWriter(blockId, + file, + ser, + fileBufferSize, + 
curWriteMetrics) } } if (objectsWritten > 0) { diff --git a/repos/spark/core/src/main/scala/org/apache/spark/util/collection/Spillable.scala b/repos/spark/core/src/main/scala/org/apache/spark/util/collection/Spillable.scala index b6e3f882ba6..d5a1a74e4d0 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/util/collection/Spillable.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/util/collection/Spillable.scala @@ -82,8 +82,10 @@ private[spark] trait Spillable[C] extends Logging { if (elementsRead % 32 == 0 && currentMemory >= myMemoryThreshold) { // Claim up to double our current memory from the shuffle memory pool val amountToRequest = 2 * currentMemory - myMemoryThreshold - val granted = taskMemoryManager - .acquireExecutionMemory(amountToRequest, MemoryMode.ON_HEAP, null) + val granted = taskMemoryManager.acquireExecutionMemory( + amountToRequest, + MemoryMode.ON_HEAP, + null) myMemoryThreshold += granted // If we were granted too little memory to grow further (either tryToAcquire returned 0, // or we already had more memory than myMemoryThreshold), spill the current collection diff --git a/repos/spark/core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala b/repos/spark/core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala index c7aeb1179b7..e61c09ee804 100644 --- a/repos/spark/core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala +++ b/repos/spark/core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala @@ -121,10 +121,14 @@ private[spark] object StratifiedSamplingUtils extends Logging { // We use the streaming version of the algorithm for sampling without replacement to avoid // using an extra pass over the RDD for computing the count. // Hence, acceptBound and waitListBound change on every iteration. - acceptResult.acceptBound = BinomialBounds - .getLowerBound(delta, acceptResult.numItems, fraction) - acceptResult.waitListBound = BinomialBounds - .getUpperBound(delta, acceptResult.numItems, fraction) + acceptResult.acceptBound = BinomialBounds.getLowerBound( + delta, + acceptResult.numItems, + fraction) + acceptResult.waitListBound = BinomialBounds.getUpperBound( + delta, + acceptResult.numItems, + fraction) val x = rng.nextUniform() if (x < acceptResult.acceptBound) { diff --git a/repos/spark/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala index 37e2fa629a4..f257f9d131a 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala @@ -398,8 +398,8 @@ private class SaveInfoListener extends SparkListener { completedTaskInfos.values.flatten.toSeq def getCompletedTaskInfos(stageId: StageId, stageAttemptId: StageAttemptId): Seq[TaskInfo] = - completedTaskInfos - .getOrElse((stageId, stageAttemptId), Seq.empty[TaskInfo]) + completedTaskInfos.getOrElse((stageId, stageAttemptId), + Seq.empty[TaskInfo]) /** * If `jobCompletionCallback` is set, block until the next call has finished. 
diff --git a/repos/spark/core/src/test/scala/org/apache/spark/DriverSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/DriverSuite.scala index 9576f62ff4e..7d1db1cda46 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/DriverSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/DriverSuite.scala @@ -28,8 +28,8 @@ import org.apache.spark.util.Utils class DriverSuite extends SparkFunSuite with Timeouts { ignore("driver should exit after finishing without cleanup (SPARK-530)") { - val sparkHome = sys.props - .getOrElse("spark.test.home", fail("spark.test.home is not set!")) + val sparkHome = sys.props.getOrElse("spark.test.home", + fail("spark.test.home is not set!")) val masters = Table("master", "local", "local-cluster[2,1,1024]") forAll(masters) { (master: String) => val process = Utils.executeCommand( diff --git a/repos/spark/core/src/test/scala/org/apache/spark/FileSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/FileSuite.scala index dd3965865e3..bbc201de004 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/FileSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/FileSuite.scala @@ -268,10 +268,11 @@ class FileSuite extends SparkFunSuite with LocalSparkContext { val nums = sc.makeRDD(1 to 3).map(x => (new IntWritable(x), new Text("a" * x))) nums.saveAsSequenceFile(outputDir) - val output = sc - .newAPIHadoopFile[IntWritable, - Text, - SequenceFileInputFormat[IntWritable, Text]](outputDir) + val output = + sc.newAPIHadoopFile[IntWritable, + Text, + SequenceFileInputFormat[IntWritable, Text]]( + outputDir) assert( output.map(_.toString).collect().toList === List("(1,a)", "(2,aa)", diff --git a/repos/spark/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala index 0f17728e0e8..a1415aa4546 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala @@ -153,8 +153,9 @@ class MapOutputTrackerSuite extends SparkFunSuite { val slaveRpcEnv = createRpcEnv("spark-slave", hostname, 0, new SecurityManager(conf)) val slaveTracker = new MapOutputTrackerWorker(conf) - slaveTracker.trackerEndpoint = slaveRpcEnv - .setupEndpointRef(rpcEnv.address, MapOutputTracker.ENDPOINT_NAME) + slaveTracker.trackerEndpoint = + slaveRpcEnv.setupEndpointRef(rpcEnv.address, + MapOutputTracker.ENDPOINT_NAME) masterTracker.registerShuffle(10, 1) masterTracker.incrementEpoch() @@ -175,8 +176,9 @@ class MapOutputTrackerSuite extends SparkFunSuite { (BlockManagerId("a", "hostA", 1000), ArrayBuffer((ShuffleBlockId(10, 0, 0), size1000))))) - masterTracker - .unregisterMapOutput(10, 0, BlockManagerId("a", "hostA", 1000)) + masterTracker.unregisterMapOutput(10, + 0, + BlockManagerId("a", "hostA", 1000)) masterTracker.incrementEpoch() slaveTracker.updateEpoch(masterTracker.getEpoch) intercept[FetchFailedException] { diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala index ba2a2484859..e4f2be94b90 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala @@ -45,8 +45,10 @@ class RPackageUtilsSuite private val dep2 = MavenCoordinate("a", "dep2", "d") private def getJarPath(coord: MavenCoordinate, repo: File): File 
= { - new File(IvyTestUtils - .pathFromCoordinate(coord, repo, "jar", useIvyLayout = false), + new File(IvyTestUtils.pathFromCoordinate(coord, + repo, + "jar", + useIvyLayout = false), IvyTestUtils.artifactName(coord, useIvyLayout = false, ".jar")) } diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala index 6bd0ea04082..df192d8e0fa 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala @@ -510,8 +510,8 @@ class SparkSubmitSuite ignore("correctly builds R packages included in a jar with --packages") { assume(RUtils.isRInstalled, "R isn't installed on this machine.") val main = MavenCoordinate("my.great.lib", "mylib", "0.1") - val sparkHome = sys.props - .getOrElse("spark.test.home", fail("spark.test.home is not set!")) + val sparkHome = sys.props.getOrElse("spark.test.home", + fail("spark.test.home is not set!")) val rScriptDir = Seq(sparkHome, "R", "pkg", "inst", "tests", "packageInAJarTest.R") .mkString(File.separator) diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala index fdfebda805c..879793037e3 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala @@ -168,25 +168,30 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll { val main = new MavenCoordinate("my.great.lib", "mylib", "0.1") val dep = "my.great.dep:mydep:0.5" // Local M2 repository - IvyTestUtils - .withRepository(main, Some(dep), Some(SparkSubmitUtils.m2Path)) { repo => - val jarPath = SparkSubmitUtils - .resolveMavenCoordinates(main.toString, None, None, isTest = true) + IvyTestUtils.withRepository(main, Some(dep), Some(SparkSubmitUtils.m2Path)) { + repo => + val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, + None, + None, + isTest = true) assert(jarPath.indexOf("mylib") >= 0, "should find artifact") assert(jarPath.indexOf("mydep") >= 0, "should find dependency") - } + } // Local Ivy Repository val settings = new IvySettings val ivyLocal = new File(settings.getDefaultIvyUserDir, "local" + File.separator) - IvyTestUtils - .withRepository(main, Some(dep), Some(ivyLocal), useIvyLayout = true) { - repo => - val jarPath = SparkSubmitUtils - .resolveMavenCoordinates(main.toString, None, None, isTest = true) - assert(jarPath.indexOf("mylib") >= 0, "should find artifact") - assert(jarPath.indexOf("mydep") >= 0, "should find dependency") - } + IvyTestUtils.withRepository(main, + Some(dep), + Some(ivyLocal), + useIvyLayout = true) { repo => + val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, + None, + None, + isTest = true) + assert(jarPath.indexOf("mylib") >= 0, "should find artifact") + assert(jarPath.indexOf("mydep") >= 0, "should find dependency") + } // Local ivy repository with modified home val dummyIvyLocal = new File(tempIvyPath, "local" + File.separator) settings.setDefaultIvyUserDir(new File(tempIvyPath)) @@ -207,8 +212,10 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll { test("dependency not found throws RuntimeException") { intercept[RuntimeException] { - SparkSubmitUtils - .resolveMavenCoordinates("a:b:c", None, None, isTest = true) 
+ SparkSubmitUtils.resolveMavenCoordinates("a:b:c", + None, + None, + isTest = true) } } @@ -231,8 +238,10 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll { .map(comp => s"org.apache.spark:spark-${comp}2.10:1.2.0") .mkString(",") + ",org.apache.spark:spark-core_fake:1.2.0" - val path = SparkSubmitUtils - .resolveMavenCoordinates(coordinates, None, None, isTest = true) + val path = SparkSubmitUtils.resolveMavenCoordinates(coordinates, + None, + None, + isTest = true) assert(path === "", "should return empty path") val main = MavenCoordinate("org.apache.spark", "spark-streaming-kafka-assembly_2.10", diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala index 1692aacb6b5..57352c6785a 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala @@ -64,8 +64,11 @@ class StandaloneDynamicAllocationSuite masterRpcEnv = RpcEnv.create(Master.SYSTEM_NAME, "localhost", 0, conf, securityManager) workerRpcEnvs = (0 until numWorkers).map { i => - RpcEnv - .create(Worker.SYSTEM_NAME + i, "localhost", 0, conf, securityManager) + RpcEnv.create(Worker.SYSTEM_NAME + i, + "localhost", + 0, + conf, + securityManager) } master = makeMaster() workers = makeWorkers(10, 2048) diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala index b4a3ef650bb..25dd7c4d40c 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala @@ -61,8 +61,11 @@ class AppClientSuite masterRpcEnv = RpcEnv.create(Master.SYSTEM_NAME, "localhost", 0, conf, securityManager) workerRpcEnvs = (0 until numWorkers).map { i => - RpcEnv - .create(Worker.SYSTEM_NAME + i, "localhost", 0, conf, securityManager) + RpcEnv.create(Worker.SYSTEM_NAME + i, + "localhost", + 0, + conf, + securityManager) } master = makeMaster() workers = makeWorkers(10, 2048) diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala index d538db996b3..03edc89d8f1 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala @@ -328,8 +328,12 @@ class ApplicationCacheSuite val updateTime = window * 3 // update the cached value - val updatedApp = operations - .putAppUI(appId, attemptId, true, started, updateTime, updateTime) + val updatedApp = operations.putAppUI(appId, + attemptId, + true, + started, + updateTime, + updateTime) val endTime = window * 10 clock.setTime(endTime) logDebug(s"Before operation = $cache") diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala index a48deea2019..f5a03694c19 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala +++ 
b/repos/spark/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala @@ -78,8 +78,11 @@ class PersistenceEngineSuite extends SparkFunSuite { assert(persistenceEngine.read[String]("test_").isEmpty) // Test deserializing objects that contain RpcEndpointRef - val testRpcEnv = RpcEnv - .create("test", "localhost", 12345, conf, new SecurityManager(conf)) + val testRpcEnv = RpcEnv.create("test", + "localhost", + 12345, + conf, + new SecurityManager(conf)) try { // Create a real endpoint so that we can test RpcEndpointRef deserialization val workerEndpoint = diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala index 136034244ad..608c975995c 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala @@ -121,8 +121,9 @@ class SubmitRestProtocolSuite extends SparkFunSuite { // test JSON val json = message.toJson assertJsonEquals(json, submitDriverRequestJson) - val newMessage = SubmitRestProtocolMessage - .fromJson(json, classOf[CreateSubmissionRequest]) + val newMessage = + SubmitRestProtocolMessage.fromJson(json, + classOf[CreateSubmissionRequest]) assert(newMessage.clientSparkVersion === "1.2.3") assert(newMessage.appResource === "honey-walnut-cherry.jar") assert(newMessage.mainClass === "org.apache.spark.examples.SparkPie") @@ -160,8 +161,9 @@ class SubmitRestProtocolSuite extends SparkFunSuite { // test JSON val json = message.toJson assertJsonEquals(json, submitDriverResponseJson) - val newMessage = SubmitRestProtocolMessage - .fromJson(json, classOf[CreateSubmissionResponse]) + val newMessage = + SubmitRestProtocolMessage.fromJson(json, + classOf[CreateSubmissionResponse]) assert(newMessage.serverSparkVersion === "1.2.3") assert(newMessage.submissionId === "driver_123") assert(newMessage.success) @@ -198,8 +200,9 @@ class SubmitRestProtocolSuite extends SparkFunSuite { // test JSON val json = message.toJson assertJsonEquals(json, driverStatusResponseJson) - val newMessage = SubmitRestProtocolMessage - .fromJson(json, classOf[SubmissionStatusResponse]) + val newMessage = + SubmitRestProtocolMessage.fromJson(json, + classOf[SubmissionStatusResponse]) assert(newMessage.serverSparkVersion === "1.2.3") assert(newMessage.submissionId === "driver_123") assert(newMessage.driverState === "RUNNING") diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala index da00d2350f2..7761afd6e6a 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala @@ -30,8 +30,8 @@ class CommandUtilsSuite test("set libraryPath correctly") { val appId = "12345-worker321-9876" - val sparkHome = sys.props - .getOrElse("spark.test.home", fail("spark.test.home is not set!")) + val sparkHome = sys.props.getOrElse("spark.test.home", + fail("spark.test.home is not set!")) val cmd = new Command("mainClass", Seq(), Map(), diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala index 01cc96eea45..d5025211328 100644 --- 
a/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala @@ -26,8 +26,8 @@ class ExecutorRunnerTest extends SparkFunSuite { test("command includes appId") { val appId = "12345-worker321-9876" val conf = new SparkConf - val sparkHome = sys.props - .getOrElse("spark.test.home", fail("spark.test.home is not set!")) + val sparkHome = sys.props.getOrElse("spark.test.home", + fail("spark.test.home is not set!")) val appDesc = new ApplicationDescription( "app name", Some(8), diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala index e41db31f707..7cd1c30f2d9 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala @@ -75,8 +75,11 @@ class WorkerSuite extends SparkFunSuite with Matchers { test("test clearing of finishedExecutors (small number of executors)") { val conf = new SparkConf() conf.set("spark.worker.ui.retainedExecutors", 2.toString) - val rpcEnv = RpcEnv - .create("test", "localhost", 12345, conf, new SecurityManager(conf)) + val rpcEnv = RpcEnv.create("test", + "localhost", + 12345, + conf, + new SecurityManager(conf)) val worker = new Worker(rpcEnv, 50000, 20, @@ -109,8 +112,11 @@ class WorkerSuite extends SparkFunSuite with Matchers { test("test clearing of finishedExecutors (more executors)") { val conf = new SparkConf() conf.set("spark.worker.ui.retainedExecutors", 30.toString) - val rpcEnv = RpcEnv - .create("test", "localhost", 12345, conf, new SecurityManager(conf)) + val rpcEnv = RpcEnv.create("test", + "localhost", + 12345, + conf, + new SecurityManager(conf)) val worker = new Worker(rpcEnv, 50000, 20, @@ -152,8 +158,11 @@ class WorkerSuite extends SparkFunSuite with Matchers { test("test clearing of finishedDrivers (small number of drivers)") { val conf = new SparkConf() conf.set("spark.worker.ui.retainedDrivers", 2.toString) - val rpcEnv = RpcEnv - .create("test", "localhost", 12345, conf, new SecurityManager(conf)) + val rpcEnv = RpcEnv.create("test", + "localhost", + 12345, + conf, + new SecurityManager(conf)) val worker = new Worker(rpcEnv, 50000, 20, @@ -188,8 +197,11 @@ class WorkerSuite extends SparkFunSuite with Matchers { test("test clearing of finishedDrivers (more drivers)") { val conf = new SparkConf() conf.set("spark.worker.ui.retainedDrivers", 30.toString) - val rpcEnv = RpcEnv - .create("test", "localhost", 12345, conf, new SecurityManager(conf)) + val rpcEnv = RpcEnv.create("test", + "localhost", + 12345, + conf, + new SecurityManager(conf)) val worker = new Worker(rpcEnv, 50000, 20, diff --git a/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala index e717cd67841..6ed5da0c32c 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala @@ -23,8 +23,11 @@ import org.apache.spark.rpc.{RpcAddress, RpcEndpointAddress, RpcEnv} class WorkerWatcherSuite extends SparkFunSuite { test("WorkerWatcher shuts down on valid disassociation") { val conf = new SparkConf() - val rpcEnv = RpcEnv - .create("test", "localhost", 12345, conf, new 
SecurityManager(conf)) + val rpcEnv = RpcEnv.create("test", + "localhost", + 12345, + conf, + new SecurityManager(conf)) val targetWorkerUrl = RpcEndpointAddress(RpcAddress("1.2.3.4", 1234), "Worker").toString val workerWatcher = @@ -37,8 +40,11 @@ class WorkerWatcherSuite extends SparkFunSuite { test("WorkerWatcher stays alive on invalid disassociation") { val conf = new SparkConf() - val rpcEnv = RpcEnv - .create("test", "localhost", 12345, conf, new SecurityManager(conf)) + val rpcEnv = RpcEnv.create("test", + "localhost", + 12345, + conf, + new SecurityManager(conf)) val targetWorkerUrl = RpcEndpointAddress(RpcAddress("1.2.3.4", 1234), "Worker").toString val otherRpcAddress = RpcAddress("4.3.2.1", 1234) diff --git a/repos/spark/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala index 37f0bc59ef4..29e06d3f1d9 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala @@ -91,8 +91,9 @@ class CompressionCodecSuite extends SparkFunSuite { } test("snappy compression codec") { - val codec = CompressionCodec - .createCodec(conf, classOf[SnappyCompressionCodec].getName) + val codec = + CompressionCodec.createCodec(conf, + classOf[SnappyCompressionCodec].getName) assert(codec.getClass === classOf[SnappyCompressionCodec]) testCodec(codec) } @@ -104,8 +105,9 @@ class CompressionCodecSuite extends SparkFunSuite { } test("snappy supports concatenation of serialized streams") { - val codec = CompressionCodec - .createCodec(conf, classOf[SnappyCompressionCodec].getName) + val codec = + CompressionCodec.createCodec(conf, + classOf[SnappyCompressionCodec].getName) assert(codec.getClass === classOf[SnappyCompressionCodec]) testConcatenationOfSerializedStreams(codec) } diff --git a/repos/spark/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala index 158817fbbaf..cd0d8c5ce2a 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala @@ -109,8 +109,10 @@ class PairRDDFunctionsSuite extends SparkFunSuite with SharedSparkContext { val stratifiedData = data.keyBy(StratifiedAuxiliary.stratifier(fractionPositive)) val samplingRate = 0.1 - StratifiedAuxiliary - .testSample(stratifiedData, samplingRate, defaultSeed, n) + StratifiedAuxiliary.testSample(stratifiedData, + samplingRate, + defaultSeed, + n) } // vary fractionPositive @@ -120,8 +122,10 @@ class PairRDDFunctionsSuite extends SparkFunSuite with SharedSparkContext { val stratifiedData = data.keyBy(StratifiedAuxiliary.stratifier(fractionPositive)) val samplingRate = 0.1 - StratifiedAuxiliary - .testSample(stratifiedData, samplingRate, defaultSeed, n) + StratifiedAuxiliary.testSample(stratifiedData, + samplingRate, + defaultSeed, + n) } // Use the same data for the rest of the tests @@ -139,8 +143,10 @@ class PairRDDFunctionsSuite extends SparkFunSuite with SharedSparkContext { // vary sampling rate for (samplingRate <- List(0.01, 0.05, 0.1, 0.5)) { - StratifiedAuxiliary - .testSample(stratifiedData, samplingRate, defaultSeed, n) + StratifiedAuxiliary.testSample(stratifiedData, + samplingRate, + defaultSeed, + n) } } @@ -154,8 +160,10 @@ class PairRDDFunctionsSuite extends SparkFunSuite with SharedSparkContext { val 
stratifiedData = data.keyBy(StratifiedAuxiliary.stratifier(fractionPositive)) val samplingRate = 0.1 - StratifiedAuxiliary - .testSampleExact(stratifiedData, samplingRate, defaultSeed, n) + StratifiedAuxiliary.testSampleExact(stratifiedData, + samplingRate, + defaultSeed, + n) } // vary fractionPositive @@ -165,8 +173,10 @@ class PairRDDFunctionsSuite extends SparkFunSuite with SharedSparkContext { val stratifiedData = data.keyBy(StratifiedAuxiliary.stratifier(fractionPositive)) val samplingRate = 0.1 - StratifiedAuxiliary - .testSampleExact(stratifiedData, samplingRate, defaultSeed, n) + StratifiedAuxiliary.testSampleExact(stratifiedData, + samplingRate, + defaultSeed, + n) } // Use the same data for the rest of the tests @@ -179,14 +189,18 @@ class PairRDDFunctionsSuite extends SparkFunSuite with SharedSparkContext { // vary seed for (seed <- defaultSeed to defaultSeed + 5L) { val samplingRate = 0.1 - StratifiedAuxiliary - .testSampleExact(stratifiedData, samplingRate, seed, n) + StratifiedAuxiliary.testSampleExact(stratifiedData, + samplingRate, + seed, + n) } // vary sampling rate for (samplingRate <- List(0.01, 0.05, 0.1, 0.5)) { - StratifiedAuxiliary - .testSampleExact(stratifiedData, samplingRate, defaultSeed, n) + StratifiedAuxiliary.testSampleExact(stratifiedData, + samplingRate, + defaultSeed, + n) } } diff --git a/repos/spark/core/src/test/scala/org/apache/spark/rdd/RDDOperationScopeSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/rdd/RDDOperationScopeSuite.scala index 61bb6ca0668..0f68a5205b1 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/rdd/RDDOperationScopeSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/rdd/RDDOperationScopeSuite.scala @@ -70,22 +70,24 @@ class RDDOperationScopeSuite extends SparkFunSuite with BeforeAndAfter { var rdd1: MyCoolRDD = null var rdd2: MyCoolRDD = null var rdd3: MyCoolRDD = null - RDDOperationScope - .withScope(sc, "scope1", allowNesting = false, ignoreParent = false) { - rdd1 = new MyCoolRDD(sc) + RDDOperationScope.withScope(sc, + "scope1", + allowNesting = false, + ignoreParent = false) { + rdd1 = new MyCoolRDD(sc) + RDDOperationScope.withScope(sc, + "scope2", + allowNesting = false, + ignoreParent = false) { + rdd2 = new MyCoolRDD(sc) RDDOperationScope.withScope(sc, - "scope2", + "scope3", allowNesting = false, ignoreParent = false) { - rdd2 = new MyCoolRDD(sc) - RDDOperationScope.withScope(sc, - "scope3", - allowNesting = false, - ignoreParent = false) { - rdd3 = new MyCoolRDD(sc) - } + rdd3 = new MyCoolRDD(sc) } } + } assert(rdd0.scope.isEmpty) assert(rdd1.scope.isDefined) assert(rdd2.scope.isDefined) @@ -101,23 +103,25 @@ class RDDOperationScopeSuite extends SparkFunSuite with BeforeAndAfter { var rdd2: MyCoolRDD = null var rdd3: MyCoolRDD = null // allow nesting here - RDDOperationScope - .withScope(sc, "scope1", allowNesting = true, ignoreParent = false) { - rdd1 = new MyCoolRDD(sc) - // stop nesting here + RDDOperationScope.withScope(sc, + "scope1", + allowNesting = true, + ignoreParent = false) { + rdd1 = new MyCoolRDD(sc) + // stop nesting here + RDDOperationScope.withScope(sc, + "scope2", + allowNesting = false, + ignoreParent = false) { + rdd2 = new MyCoolRDD(sc) RDDOperationScope.withScope(sc, - "scope2", + "scope3", allowNesting = false, ignoreParent = false) { - rdd2 = new MyCoolRDD(sc) - RDDOperationScope.withScope(sc, - "scope3", - allowNesting = false, - ignoreParent = false) { - rdd3 = new MyCoolRDD(sc) - } + rdd3 = new MyCoolRDD(sc) } } + } assert(rdd0.scope.isEmpty) 
assert(rdd1.scope.isDefined) assert(rdd2.scope.isDefined) @@ -132,22 +136,24 @@ class RDDOperationScopeSuite extends SparkFunSuite with BeforeAndAfter { var rdd1: MyCoolRDD = null var rdd2: MyCoolRDD = null var rdd3: MyCoolRDD = null - RDDOperationScope - .withScope(sc, "scope1", allowNesting = true, ignoreParent = false) { - rdd1 = new MyCoolRDD(sc) + RDDOperationScope.withScope(sc, + "scope1", + allowNesting = true, + ignoreParent = false) { + rdd1 = new MyCoolRDD(sc) + RDDOperationScope.withScope(sc, + "scope2", + allowNesting = true, + ignoreParent = false) { + rdd2 = new MyCoolRDD(sc) RDDOperationScope.withScope(sc, - "scope2", + "scope3", allowNesting = true, ignoreParent = false) { - rdd2 = new MyCoolRDD(sc) - RDDOperationScope.withScope(sc, - "scope3", - allowNesting = true, - ignoreParent = false) { - rdd3 = new MyCoolRDD(sc) - } + rdd3 = new MyCoolRDD(sc) } } + } assert(rdd0.scope.isEmpty) assert(rdd1.scope.isDefined) assert(rdd2.scope.isDefined) diff --git a/repos/spark/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala index 010c341984c..959e9cfdaed 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala @@ -378,8 +378,9 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll { // If a RpcEnv implementation breaks the `receive` contract, hope this test can expose it for (i <- 0 until 100) { @volatile var result = 0 - val endpointRef = env - .setupEndpoint(s"receive-in-sequence-$i", new ThreadSafeRpcEndpoint { + val endpointRef = env.setupEndpoint( + s"receive-in-sequence-$i", + new ThreadSafeRpcEndpoint { override val rpcEnv = env override def receive: PartialFunction[Any, Unit] = { @@ -525,8 +526,9 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll { _env: RpcEnv, name: String): (RpcEndpointRef, ConcurrentLinkedQueue[(Any, Any)]) = { val events = new ConcurrentLinkedQueue[(Any, Any)] - val ref = _env - .setupEndpoint("network-events-non-client", new ThreadSafeRpcEndpoint { + val ref = _env.setupEndpoint( + "network-events-non-client", + new ThreadSafeRpcEndpoint { override val rpcEnv = _env override def receive: PartialFunction[Any, Unit] = { @@ -660,8 +662,9 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll { val anotherEnv = createRpcEnv(new SparkConf(), "remote", 0, clientMode = true) // Use anotherEnv to find out the RpcEndpointRef - val rpcEndpointRef = anotherEnv - .setupEndpointRef(env.address, "sendWithReply-unserializable-error") + val rpcEndpointRef = + anotherEnv.setupEndpointRef(env.address, + "sendWithReply-unserializable-error") try { val f = rpcEndpointRef.ask[String]("hello") val e = intercept[Exception] { diff --git a/repos/spark/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala index 2f1e1224198..e9c03300597 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala @@ -176,8 +176,9 @@ class EventLoggingListenerSuite eventLogger.stop() // Verify file contains exactly the two events logged - val logData = EventLoggingListener - .openEventLog(new Path(eventLogger.logPath), fileSystem) + val logData = EventLoggingListener.openEventLog( + new 
Path(eventLogger.logPath), + fileSystem) try { val lines = readLines(logData) val logStart = SparkListenerLogStart(SPARK_VERSION) @@ -229,8 +230,9 @@ class EventLoggingListenerSuite eventExistenceListener.assertAllCallbacksInvoked() // Make sure expected events exist in the log file. - val logData = EventLoggingListener - .openEventLog(new Path(eventLogger.logPath), fileSystem) + val logData = EventLoggingListener.openEventLog( + new Path(eventLogger.logPath), + fileSystem) val logStart = SparkListenerLogStart(SPARK_VERSION) val lines = readLines(logData) val eventSet = mutable diff --git a/repos/spark/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala index 9e439f0224f..bdbe6357f17 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala @@ -189,8 +189,10 @@ class TaskResultGetterSuite |public class MyException extends Exception { |} """.stripMargin) - val excFile = TestUtils - .createCompiledClass("MyException", srcDir, excSource, Seq.empty) + val excFile = TestUtils.createCompiledClass("MyException", + srcDir, + excSource, + Seq.empty) val jarFile = new File(tempDir, "testJar-%s.jar".format(System.currentTimeMillis())) TestUtils.createJar(Seq(excFile), jarFile, directoryPrefix = Some("repro")) diff --git a/repos/spark/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala index 0524431b386..f4d4202d3e6 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala @@ -203,8 +203,11 @@ class CoarseMesosSchedulerBackendSuite val status2 = createTaskStatus("1", "s1", TaskState.TASK_RUNNING) backend.statusUpdate(driver, status2) - verify(externalShuffleClient, times(1)) - .registerDriverWithShuffleService(anyString, anyInt, anyLong, anyLong) + verify(externalShuffleClient, times(1)).registerDriverWithShuffleService( + anyString, + anyInt, + anyLong, + anyLong) } test("mesos kills an executor when told") { diff --git a/repos/spark/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala index 3263e562f1a..9a2351c44bd 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala @@ -96,12 +96,9 @@ class MesosSchedulerUtilsSuite val tachyonFalseOffer = Map("tachyon" -> Value.Text.newBuilder().setValue("false").build()) - utils - .matchesAttributeRequirements(parsedConstraints, noTachyonOffer) shouldBe false - utils - .matchesAttributeRequirements(parsedConstraints, tachyonTrueOffer) shouldBe true - utils - .matchesAttributeRequirements(parsedConstraints, tachyonFalseOffer) shouldBe true + utils.matchesAttributeRequirements(parsedConstraints, noTachyonOffer) shouldBe false + utils.matchesAttributeRequirements(parsedConstraints, tachyonTrueOffer) shouldBe true + utils.matchesAttributeRequirements(parsedConstraints, 
tachyonFalseOffer) shouldBe true } test("subset match is performed for set attributes") { @@ -117,8 +114,7 @@ class MesosSchedulerUtilsSuite val zoneConstraintStr = "tachyon:;zone:us-east-1a,us-east-1c" val parsedConstraints = utils.parseConstraintString(zoneConstraintStr) - utils - .matchesAttributeRequirements(parsedConstraints, supersetConstraint) shouldBe true + utils.matchesAttributeRequirements(parsedConstraints, supersetConstraint) shouldBe true } test("less than equal match is performed on scalar attributes") { @@ -129,12 +125,9 @@ class MesosSchedulerUtilsSuite val eqConstraint = utils.parseConstraintString("gpus:3") val gtConstraint = utils.parseConstraintString("gpus:4") - utils - .matchesAttributeRequirements(ltConstraint, offerAttribs) shouldBe true - utils - .matchesAttributeRequirements(eqConstraint, offerAttribs) shouldBe true - utils - .matchesAttributeRequirements(gtConstraint, offerAttribs) shouldBe false + utils.matchesAttributeRequirements(ltConstraint, offerAttribs) shouldBe true + utils.matchesAttributeRequirements(eqConstraint, offerAttribs) shouldBe true + utils.matchesAttributeRequirements(gtConstraint, offerAttribs) shouldBe false } test("contains match is performed for range attributes") { @@ -145,12 +138,9 @@ class MesosSchedulerUtilsSuite val gtConstraint = utils.parseConstraintString("ports:8002") val multiConstraint = utils.parseConstraintString("ports:5000,7500,8300") - utils - .matchesAttributeRequirements(ltConstraint, offerAttribs) shouldBe false - utils - .matchesAttributeRequirements(eqConstraint, offerAttribs) shouldBe true - utils - .matchesAttributeRequirements(gtConstraint, offerAttribs) shouldBe false + utils.matchesAttributeRequirements(ltConstraint, offerAttribs) shouldBe false + utils.matchesAttributeRequirements(eqConstraint, offerAttribs) shouldBe true + utils.matchesAttributeRequirements(gtConstraint, offerAttribs) shouldBe false utils.matchesAttributeRequirements(multiConstraint, offerAttribs) shouldBe true } @@ -161,8 +151,7 @@ class MesosSchedulerUtilsSuite val trueConstraint = utils.parseConstraintString("tachyon:true") val falseConstraint = utils.parseConstraintString("tachyon:false") - utils - .matchesAttributeRequirements(trueConstraint, offerAttribs) shouldBe true + utils.matchesAttributeRequirements(trueConstraint, offerAttribs) shouldBe true utils.matchesAttributeRequirements(falseConstraint, offerAttribs) shouldBe false } } diff --git a/repos/spark/core/src/test/scala/org/apache/spark/status/api/v1/AllStagesResourceSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/status/api/v1/AllStagesResourceSuite.scala index 29eab35aeb5..29696618d0d 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/status/api/v1/AllStagesResourceSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/status/api/v1/AllStagesResourceSuite.scala @@ -48,8 +48,10 @@ class AllStagesResourceSuite extends SparkFunSuite { Seq.empty, "details abc", Seq.empty) - val stageData = AllStagesResource - .stageUiToStageData(status, stageInfo, stageUiData, false) + val stageData = AllStagesResource.stageUiToStageData(status, + stageInfo, + stageUiData, + false) stageData.firstTaskLaunchedTime } diff --git a/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockInfoManagerSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockInfoManagerSuite.scala index dd72a90b9b2..890533dd9bd 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockInfoManagerSuite.scala +++ 
b/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockInfoManagerSuite.scala @@ -153,8 +153,9 @@ class BlockInfoManagerSuite extends SparkFunSuite with BeforeAndAfterEach { // After removing the block, the write lock is released. Both threads should wake up but only // one should acquire the write lock. The second thread should block until the winner of the // write race releases its lock. - val winningFuture: Future[Boolean] = Await - .ready(Future.firstCompletedOf(Seq(lock1Future, lock2Future)), 1.seconds) + val winningFuture: Future[Boolean] = + Await.ready(Future.firstCompletedOf(Seq(lock1Future, lock2Future)), + 1.seconds) assert(winningFuture.value.get.get) val winningTID = blockInfoManager.get("block").get.writerTask assert(winningTID === 1 || winningTID === 2) diff --git a/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala index daf0f0884a6..416dacff79d 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala @@ -358,8 +358,9 @@ class BlockManagerReplicationSuite replicationFactor: Int): Int = { val storageLevel = StorageLevel(true, true, false, true, replicationFactor) - initialStores.head - .putSingle(blockId, new Array[Byte](blockSize), storageLevel) + initialStores.head.putSingle(blockId, + new Array[Byte](blockSize), + storageLevel) val numLocations = master.getLocations(blockId).size allStores.foreach { _.removeBlock(blockId) } numLocations diff --git a/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala index 1b9ca1827f9..16f5def9f93 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala @@ -1290,8 +1290,9 @@ class BlockManagerSuite assert(memoryStore.currentUnrollMemoryForThisTask === 0) // Unroll with all the space in the world. This should succeed. 
- var putResult = memoryStore - .putIterator("unroll", smallList.iterator, StorageLevel.MEMORY_ONLY) + var putResult = memoryStore.putIterator("unroll", + smallList.iterator, + StorageLevel.MEMORY_ONLY) assert(putResult.isRight) assert(memoryStore.currentUnrollMemoryForThisTask === 0) smallList.iterator.zip(memoryStore.getValues("unroll").get).foreach { @@ -1309,8 +1310,9 @@ class BlockManagerSuite store.putIterator("someBlock2", smallList.iterator, StorageLevel.MEMORY_ONLY)) - putResult = memoryStore - .putIterator("unroll", smallList.iterator, StorageLevel.MEMORY_ONLY) + putResult = memoryStore.putIterator("unroll", + smallList.iterator, + StorageLevel.MEMORY_ONLY) assert(putResult.isRight) assert(memoryStore.currentUnrollMemoryForThisTask === 0) assert(memoryStore.contains("someBlock2")) @@ -1328,8 +1330,9 @@ class BlockManagerSuite store.putIterator("someBlock3", smallList.iterator, StorageLevel.MEMORY_ONLY)) - putResult = memoryStore - .putIterator("unroll", bigList.iterator, StorageLevel.MEMORY_ONLY) + putResult = memoryStore.putIterator("unroll", + bigList.iterator, + StorageLevel.MEMORY_ONLY) assert(memoryStore.currentUnrollMemoryForThisTask > 0) // we returned an iterator assert(!memoryStore.contains("someBlock2")) assert(putResult.isLeft) diff --git a/repos/spark/core/src/test/scala/org/apache/spark/storage/StorageSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/storage/StorageSuite.scala index e95d949d375..633de52a414 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/storage/StorageSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/storage/StorageSuite.scala @@ -351,12 +351,12 @@ class StorageSuite extends SparkFunSuite { test("StorageUtils.getRddBlockLocations with multiple locations") { val storageStatuses = stockStorageStatuses - storageStatuses(0) - .addBlock(RDDBlockId(1, 0), BlockStatus(memAndDisk, 1L, 2L)) - storageStatuses(0) - .addBlock(RDDBlockId(0, 4), BlockStatus(memAndDisk, 1L, 2L)) - storageStatuses(2) - .addBlock(RDDBlockId(0, 0), BlockStatus(memAndDisk, 1L, 2L)) + storageStatuses(0).addBlock(RDDBlockId(1, 0), + BlockStatus(memAndDisk, 1L, 2L)) + storageStatuses(0).addBlock(RDDBlockId(0, 4), + BlockStatus(memAndDisk, 1L, 2L)) + storageStatuses(2).addBlock(RDDBlockId(0, 0), + BlockStatus(memAndDisk, 1L, 2L)) val blockLocations0 = StorageUtils.getRddBlockLocations(0, storageStatuses) val blockLocations1 = StorageUtils.getRddBlockLocations(1, storageStatuses) assert(blockLocations0.size === 5) diff --git a/repos/spark/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala index fc42c285beb..da2500fa961 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala @@ -458,28 +458,30 @@ class ExternalAppendOnlyMapSuite extends SparkFunSuite with LocalSparkContext { spillThreshold.toString) sc = new SparkContext("local", "test", conf) // No spilling - AccumulatorSuite - .verifyPeakExecutionMemorySet(sc, "external map without spilling") { - assertNotSpilled(sc, "verify peak memory") { - sc.parallelize(1 to spillThreshold / 2, 2) - .map { i => - (i, i) - } - .reduceByKey(_ + _) - .count() - } + AccumulatorSuite.verifyPeakExecutionMemorySet( + sc, + "external map without spilling") { + assertNotSpilled(sc, "verify peak memory") { + sc.parallelize(1 to 
spillThreshold / 2, 2) + .map { i => + (i, i) + } + .reduceByKey(_ + _) + .count() } + } // With spilling - AccumulatorSuite - .verifyPeakExecutionMemorySet(sc, "external map with spilling") { - assertSpilled(sc, "verify peak memory") { - sc.parallelize(1 to spillThreshold * 3, 2) - .map { i => - (i, i) - } - .reduceByKey(_ + _) - .count() - } + AccumulatorSuite.verifyPeakExecutionMemorySet( + sc, + "external map with spilling") { + assertSpilled(sc, "verify peak memory") { + sc.parallelize(1 to spillThreshold * 3, 2) + .map { i => + (i, i) + } + .reduceByKey(_ + _) + .count() } + } } } diff --git a/repos/spark/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala index d18fa23f4b9..8c245f65d33 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala @@ -781,18 +781,20 @@ class ExternalSorterSuite extends SparkFunSuite with LocalSparkContext { sc = new SparkContext("local", "test", conf) // Avoid aggregating here to make sure we're not also using ExternalAppendOnlyMap // No spilling - AccumulatorSuite - .verifyPeakExecutionMemorySet(sc, "external sorter without spilling") { - assertNotSpilled(sc, "verify peak memory") { - sc.parallelize(1 to spillThreshold / 2, 2).repartition(100).count() - } + AccumulatorSuite.verifyPeakExecutionMemorySet( + sc, + "external sorter without spilling") { + assertNotSpilled(sc, "verify peak memory") { + sc.parallelize(1 to spillThreshold / 2, 2).repartition(100).count() } + } // With spilling - AccumulatorSuite - .verifyPeakExecutionMemorySet(sc, "external sorter with spilling") { - assertSpilled(sc, "verify peak memory") { - sc.parallelize(1 to spillThreshold * 3, 2).repartition(100).count() - } + AccumulatorSuite.verifyPeakExecutionMemorySet( + sc, + "external sorter with spilling") { + assertSpilled(sc, "verify peak memory") { + sc.parallelize(1 to spillThreshold * 3, 2).repartition(100).count() } + } } } diff --git a/repos/spark/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala b/repos/spark/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala index c618b80e053..718b4f26730 100644 --- a/repos/spark/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala +++ b/repos/spark/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala @@ -211,14 +211,18 @@ class SorterSuite extends SparkFunSuite with Logging { val sorterWithoutKeyReuse = new Sorter(new IntArraySortDataFormat) runExperiment("Sorter without key reuse on primitive int array")({ - sorterWithoutKeyReuse - .sort(intPrimitiveArray, 0, numElements, Ordering[Int]) + sorterWithoutKeyReuse.sort(intPrimitiveArray, + 0, + numElements, + Ordering[Int]) }, prepareIntPrimitiveArray) val sorterWithKeyReuse = new Sorter(new KeyReuseIntArraySortDataFormat) runExperiment("Sorter with key reuse on primitive int array")({ - sorterWithKeyReuse - .sort(intPrimitiveArray, 0, numElements, Ordering[IntWrapper]) + sorterWithKeyReuse.sort(intPrimitiveArray, + 0, + numElements, + Ordering[IntWrapper]) }, prepareIntPrimitiveArray) } } diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/CassandraCQLTest.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/CassandraCQLTest.scala index baf824fee9a..3a487515e68 100644 --- 
a/repos/spark/examples/src/main/scala/org/apache/spark/examples/CassandraCQLTest.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/CassandraCQLTest.scala @@ -86,8 +86,8 @@ object CassandraCQLTest { ConfigHelper.setInputColumnFamily(job.getConfiguration(), KeySpace, InputColumnFamily) - ConfigHelper - .setInputPartitioner(job.getConfiguration(), "Murmur3Partitioner") + ConfigHelper.setInputPartitioner(job.getConfiguration(), + "Murmur3Partitioner") CqlConfigHelper.setInputCQLPageRowSize(job.getConfiguration(), "3") /** CqlConfigHelper.setInputWhereClauses(job.getConfiguration(), "user_id='bob'") */ @@ -102,8 +102,8 @@ object CassandraCQLTest { OutputColumnFamily) ConfigHelper.setOutputInitialAddress(job.getConfiguration(), cHost) ConfigHelper.setOutputRpcPort(job.getConfiguration(), cPort) - ConfigHelper - .setOutputPartitioner(job.getConfiguration(), "Murmur3Partitioner") + ConfigHelper.setOutputPartitioner(job.getConfiguration(), + "Murmur3Partitioner") val casRdd = sc.newAPIHadoopRDD(job.getConfiguration(), classOf[CqlPagingInputFormat], diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala index aeaff40d10d..d207650366c 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala @@ -68,10 +68,12 @@ object CassandraTest { ConfigHelper.setInputRpcPort(job.getConfiguration(), port) ConfigHelper.setOutputInitialAddress(job.getConfiguration(), host) ConfigHelper.setOutputRpcPort(job.getConfiguration(), port) - ConfigHelper - .setInputColumnFamily(job.getConfiguration(), "casDemo", "Words") - ConfigHelper - .setOutputColumnFamily(job.getConfiguration(), "casDemo", "WordCount") + ConfigHelper.setInputColumnFamily(job.getConfiguration(), + "casDemo", + "Words") + ConfigHelper.setOutputColumnFamily(job.getConfiguration(), + "casDemo", + "WordCount") val predicate = new SlicePredicate() val sliceRange = new SliceRange() @@ -80,10 +82,10 @@ object CassandraTest { predicate.setSlice_range(sliceRange) ConfigHelper.setInputSlicePredicate(job.getConfiguration(), predicate) - ConfigHelper - .setInputPartitioner(job.getConfiguration(), "Murmur3Partitioner") - ConfigHelper - .setOutputPartitioner(job.getConfiguration(), "Murmur3Partitioner") + ConfigHelper.setInputPartitioner(job.getConfiguration(), + "Murmur3Partitioner") + ConfigHelper.setOutputPartitioner(job.getConfiguration(), + "Murmur3Partitioner") // Make a new Hadoop RDD val casRdd = sc.newAPIHadoopRDD(job.getConfiguration(), diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala index 8557679fbe9..d479cbbbd02 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala @@ -245,18 +245,22 @@ object GBTExample { algo match { case "classification" => println("Training data results:") - DecisionTreeExample - .evaluateClassificationModel(pipelineModel, training, labelColName) + DecisionTreeExample.evaluateClassificationModel(pipelineModel, + training, + labelColName) println("Test data results:") - DecisionTreeExample - .evaluateClassificationModel(pipelineModel, test, labelColName) + 
DecisionTreeExample.evaluateClassificationModel(pipelineModel, + test, + labelColName) case "regression" => println("Training data results:") - DecisionTreeExample - .evaluateRegressionModel(pipelineModel, training, labelColName) + DecisionTreeExample.evaluateRegressionModel(pipelineModel, + training, + labelColName) println("Test data results:") - DecisionTreeExample - .evaluateRegressionModel(pipelineModel, test, labelColName) + DecisionTreeExample.evaluateRegressionModel(pipelineModel, + test, + labelColName) case _ => throw new IllegalArgumentException( "Algo ${params.algo} not supported.") diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionExample.scala index 411b7c9dca4..1178ae6be73 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/LogisticRegressionExample.scala @@ -174,11 +174,13 @@ object LogisticRegressionExample { s"Weights: ${lorModel.coefficients} Intercept: ${lorModel.intercept}") println("Training data results:") - DecisionTreeExample - .evaluateClassificationModel(pipelineModel, training, "indexedLabel") + DecisionTreeExample.evaluateClassificationModel(pipelineModel, + training, + "indexedLabel") println("Test data results:") - DecisionTreeExample - .evaluateClassificationModel(pipelineModel, test, "indexedLabel") + DecisionTreeExample.evaluateClassificationModel(pipelineModel, + test, + "indexedLabel") sc.stop() } diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala index 276fcf10eaf..3bc4e2f9ff7 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/RandomForestExample.scala @@ -256,18 +256,22 @@ object RandomForestExample { algo match { case "classification" => println("Training data results:") - DecisionTreeExample - .evaluateClassificationModel(pipelineModel, training, labelColName) + DecisionTreeExample.evaluateClassificationModel(pipelineModel, + training, + labelColName) println("Test data results:") - DecisionTreeExample - .evaluateClassificationModel(pipelineModel, test, labelColName) + DecisionTreeExample.evaluateClassificationModel(pipelineModel, + test, + labelColName) case "regression" => println("Training data results:") - DecisionTreeExample - .evaluateRegressionModel(pipelineModel, training, labelColName) + DecisionTreeExample.evaluateRegressionModel(pipelineModel, + training, + labelColName) println("Test data results:") - DecisionTreeExample - .evaluateRegressionModel(pipelineModel, test, labelColName) + DecisionTreeExample.evaluateRegressionModel(pipelineModel, + test, + labelColName) case _ => throw new IllegalArgumentException( "Algo ${params.algo} not supported.") diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/SimpleParamsExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/SimpleParamsExample.scala index c0d858879ee..3e56482176e 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/SimpleParamsExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/SimpleParamsExample.scala @@ -69,10 +69,8 @@ object 
SimpleParamsExample { // We may alternatively specify parameters using a ParamMap, // which supports several methods for specifying parameters. val paramMap = ParamMap(lr.maxIter -> 20) - paramMap - .put(lr.maxIter, 30) // Specify 1 Param. This overwrites the original maxIter. - paramMap - .put(lr.regParam -> 0.1, lr.thresholds -> Array(0.45, 0.55)) // Specify multiple Params. + paramMap.put(lr.maxIter, 30) // Specify 1 Param. This overwrites the original maxIter. + paramMap.put(lr.regParam -> 0.1, lr.thresholds -> Array(0.45, 0.55)) // Specify multiple Params. // One can also combine ParamMaps. val paramMap2 = diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala index c7969562ffc..1b90b27243a 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala @@ -47,8 +47,9 @@ object VectorSlicerExample { new AttributeGroup("userFeatures", attrs.asInstanceOf[Array[Attribute]]) val dataRDD = sc.parallelize(data) - val dataset = sqlContext - .createDataFrame(dataRDD, StructType(Array(attrGroup.toStructField()))) + val dataset = sqlContext.createDataFrame( + dataRDD, + StructType(Array(attrGroup.toStructField()))) val slicer = new VectorSlicer().setInputCol("userFeatures").setOutputCol("features") diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassificationMetricsExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassificationMetricsExample.scala index c6ab0c8a6e2..89afaf8ebf2 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassificationMetricsExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassificationMetricsExample.scala @@ -34,8 +34,9 @@ object BinaryClassificationMetricsExample { val sc = new SparkContext(conf) // $example on$ // Load training data in LIBSVM format - val data = MLUtils - .loadLibSVMFile(sc, "data/mllib/sample_binary_classification_data.txt") + val data = MLUtils.loadLibSVMFile( + sc, + "data/mllib/sample_binary_classification_data.txt") // Split data into training (60%) and test (40%) val Array(training, test) = data.randomSplit(Array(0.6, 0.4), seed = 11L) diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeClassificationExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeClassificationExample.scala index e3a71cc891f..798d94103ea 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeClassificationExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/DecisionTreeClassificationExample.scala @@ -66,8 +66,9 @@ object DecisionTreeClassificationExample { // Save and load model model.save(sc, "target/tmp/myDecisionTreeClassificationModel") - val sameModel = DecisionTreeModel - .load(sc, "target/tmp/myDecisionTreeClassificationModel") + val sameModel = DecisionTreeModel.load( + sc, + "target/tmp/myDecisionTreeClassificationModel") // $example off$ } } diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingClassificationExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingClassificationExample.scala 
index 8416735442b..93ac046477b 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingClassificationExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingClassificationExample.scala @@ -61,8 +61,9 @@ object GradientBoostingClassificationExample { // Save and load model model.save(sc, "target/tmp/myGradientBoostingClassificationModel") - val sameModel = GradientBoostedTreesModel - .load(sc, "target/tmp/myGradientBoostingClassificationModel") + val sameModel = GradientBoostedTreesModel.load( + sc, + "target/tmp/myGradientBoostingClassificationModel") // $example off$ } } diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingRegressionExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingRegressionExample.scala index 4bc68299291..246a4942420 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingRegressionExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/GradientBoostingRegressionExample.scala @@ -61,8 +61,9 @@ object GradientBoostingRegressionExample { // Save and load model model.save(sc, "target/tmp/myGradientBoostingRegressionModel") - val sameModel = GradientBoostedTreesModel - .load(sc, "target/tmp/myGradientBoostingRegressionModel") + val sameModel = GradientBoostedTreesModel.load( + sc, + "target/tmp/myGradientBoostingRegressionModel") // $example off$ } } diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegressionWithSGDExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegressionWithSGDExample.scala index 3cb77393884..98a53ce7585 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegressionWithSGDExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/LinearRegressionWithSGDExample.scala @@ -57,8 +57,9 @@ object LinearRegressionWithSGDExample { // Save and load model model.save(sc, "target/tmp/scalaLinearRegressionWithSGDModel") - val sameModel = LinearRegressionModel - .load(sc, "target/tmp/scalaLinearRegressionWithSGDModel") + val sameModel = LinearRegressionModel.load( + sc, + "target/tmp/scalaLinearRegressionWithSGDModel") // $example off$ sc.stop() diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/LogisticRegressionWithLBFGSExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/LogisticRegressionWithLBFGSExample.scala index e5b6324d99f..7b2d57769fa 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/LogisticRegressionWithLBFGSExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/LogisticRegressionWithLBFGSExample.scala @@ -62,8 +62,9 @@ object LogisticRegressionWithLBFGSExample { // Save and load model model.save(sc, "target/tmp/scalaLogisticRegressionWithLBFGSModel") - val sameModel = LogisticRegressionModel - .load(sc, "target/tmp/scalaLogisticRegressionWithLBFGSModel") + val sameModel = LogisticRegressionModel.load( + sc, + "target/tmp/scalaLogisticRegressionWithLBFGSModel") // $example off$ sc.stop() diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestClassificationExample.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestClassificationExample.scala index 
e7b01c8fb50..5b401c2eb22 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestClassificationExample.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/RandomForestClassificationExample.scala @@ -67,8 +67,9 @@ object RandomForestClassificationExample { // Save and load model model.save(sc, "target/tmp/myRandomForestClassificationModel") - val sameModel = RandomForestModel - .load(sc, "target/tmp/myRandomForestClassificationModel") + val sameModel = RandomForestModel.load( + sc, + "target/tmp/myRandomForestClassificationModel") // $example off$ } } diff --git a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala index b770b7e7896..742d5b158e9 100644 --- a/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala +++ b/repos/spark/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala @@ -80,8 +80,10 @@ object SparseNaiveBayes { if (params.minPartitions > 0) params.minPartitions else sc.defaultMinPartitions - val examples = MLUtils - .loadLibSVMFile(sc, params.input, params.numFeatures, minPartitions) + val examples = MLUtils.loadLibSVMFile(sc, + params.input, + params.numFeatures, + minPartitions) // Cache examples because it will be used in both training and evaluation. examples.cache() diff --git a/repos/spark/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/repos/spark/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala index 95c90f74d66..2e6afe92de0 100644 --- a/repos/spark/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala +++ b/repos/spark/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala @@ -166,14 +166,14 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite { assert(rows(0).getString(4).equals("jumps")) assert(rows(0).getString(5).equals("over")) assert( - java.util.Arrays - .equals(rows(0).getAs[Array[Byte]](6), Array[Byte](116, 104, 101, 0))) + java.util.Arrays.equals(rows(0).getAs[Array[Byte]](6), + Array[Byte](116, 104, 101, 0))) assert( - java.util.Arrays - .equals(rows(0).getAs[Array[Byte]](7), Array[Byte](108, 97, 122, 121))) + java.util.Arrays.equals(rows(0).getAs[Array[Byte]](7), + Array[Byte](108, 97, 122, 121))) assert( - java.util.Arrays - .equals(rows(0).getAs[Array[Byte]](8), Array[Byte](100, 111, 103))) + java.util.Arrays.equals(rows(0).getAs[Array[Byte]](8), + Array[Byte](100, 111, 103))) } test("Basic write test") { diff --git a/repos/spark/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala b/repos/spark/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala index a33ba439b50..a7b0faeb71a 100644 --- a/repos/spark/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala +++ b/repos/spark/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala @@ -117,8 +117,9 @@ private[streaming] class KafkaReceiver[K: ClassTag, val topicMessageStreams = consumerConnector.createMessageStreams(topics, keyDecoder, valueDecoder) - val executorPool = ThreadUtils - .newDaemonFixedThreadPool(topics.values.sum, "KafkaMessageHandler") + val executorPool = 
ThreadUtils.newDaemonFixedThreadPool( + topics.values.sum, + "KafkaMessageHandler") try { // Start the messages handler for each partition topicMessageStreams.values.foreach { streams => diff --git a/repos/spark/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/ReliableKafkaReceiver.scala b/repos/spark/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/ReliableKafkaReceiver.scala index 66aaad7bf63..bbe52e644f1 100644 --- a/repos/spark/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/ReliableKafkaReceiver.scala +++ b/repos/spark/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/ReliableKafkaReceiver.scala @@ -143,8 +143,9 @@ private[streaming] class ReliableKafkaReceiver[K: ClassTag, consumerConfig.zkConnectionTimeoutMs, ZKStringSerializer) - messageHandlerThreadPool = ThreadUtils - .newDaemonFixedThreadPool(topics.values.sum, "KafkaMessageHandler") + messageHandlerThreadPool = ThreadUtils.newDaemonFixedThreadPool( + topics.values.sum, + "KafkaMessageHandler") blockGenerator.start() diff --git a/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala b/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala index f7beda43243..2606603475b 100644 --- a/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala +++ b/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala @@ -291,8 +291,9 @@ private[kinesis] class KinesisReceiver[T]( * for next block. Internally, this is synchronized with `rememberAddedRange()`. */ private def finalizeRangesForCurrentBlock(blockId: StreamBlockId): Unit = { - blockIdToSeqNumRanges - .put(blockId, SequenceNumberRanges(seqNumRangesInCurrentBlock.toArray)) + blockIdToSeqNumRanges.put( + blockId, + SequenceNumberRanges(seqNumRangesInCurrentBlock.toArray)) seqNumRangesInCurrentBlock.clear() logDebug(s"Generated block $blockId has $blockIdToSeqNumRanges") } diff --git a/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisRecordProcessor.scala b/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisRecordProcessor.scala index f706242fdac..980ab5e280d 100644 --- a/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisRecordProcessor.scala +++ b/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisRecordProcessor.scala @@ -137,8 +137,7 @@ private[kinesis] class KinesisRecordProcessor[T](receiver: KinesisReceiver[T], * This may lead to records being processed more than once. 
*/ case _ => - receiver - .removeCheckpointer(shardId, null) // return null so that we don't checkpoint + receiver.removeCheckpointer(shardId, null) // return null so that we don't checkpoint } } } diff --git a/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisTestUtils.scala b/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisTestUtils.scala index 64c707a6e64..ee0fcb7b4ae 100644 --- a/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisTestUtils.scala +++ b/repos/spark/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisTestUtils.scala @@ -265,8 +265,9 @@ private[kinesis] class SimpleDataGenerator(client: AmazonKinesisClient) val putRecordResult = client.putRecord(putRecordRequest) val shardId = putRecordResult.getShardId val seqNumber = putRecordResult.getSequenceNumber() - val sentSeqNumbers = shardIdToSeqNumbers - .getOrElseUpdate(shardId, new ArrayBuffer[(Int, String)]()) + val sentSeqNumbers = + shardIdToSeqNumbers.getOrElseUpdate(shardId, + new ArrayBuffer[(Int, String)]()) sentSeqNumbers += ((num, seqNumber)) } diff --git a/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KPLBasedKinesisTestUtils.scala b/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KPLBasedKinesisTestUtils.scala index cc627af8c5a..c617f0e2076 100644 --- a/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KPLBasedKinesisTestUtils.scala +++ b/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KPLBasedKinesisTestUtils.scala @@ -68,8 +68,9 @@ private[kinesis] class KPLDataGenerator(regionName: String) override def onSuccess(result: UserRecordResult): Unit = { val shardId = result.getShardId val seqNumber = result.getSequenceNumber() - val sentSeqNumbers = shardIdToSeqNumbers - .getOrElseUpdate(shardId, new ArrayBuffer[(Int, String)]()) + val sentSeqNumbers = shardIdToSeqNumbers.getOrElseUpdate( + shardId, + new ArrayBuffer[(Int, String)]()) sentSeqNumbers += ((num, seqNumber)) } } diff --git a/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDDSuite.scala b/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDDSuite.scala index 0a43ed1a0fa..7958f5b1775 100644 --- a/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDDSuite.scala +++ b/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDDSuite.scala @@ -233,8 +233,9 @@ abstract class KinesisBackedBlockRDDTests(aggregateTestData: Boolean) val blockData = shardIdToData(shardIds(i)).iterator.map { _.toString.getBytes() } - blockManager - .putIterator(blockIds(i), blockData, StorageLevel.MEMORY_ONLY) + blockManager.putIterator(blockIds(i), + blockData, + StorageLevel.MEMORY_ONLY) } // Create the necessary ranges to use in the RDD diff --git a/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala b/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala index ad667f1fc61..6d783ce9c43 100644 --- a/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala +++ 
b/repos/spark/external/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisReceiverSuite.scala @@ -91,10 +91,10 @@ class KinesisReceiverSuite recordProcessor.processRecords(batch, checkpointerMock) verify(receiverMock, times(1)).isStopped() - verify(receiverMock, never) - .addRecords(anyString, anyListOf(classOf[Record])) - verify(receiverMock, never) - .setCheckpointer(anyString, meq(checkpointerMock)) + verify(receiverMock, never).addRecords(anyString, + anyListOf(classOf[Record])) + verify(receiverMock, never).setCheckpointer(anyString, + meq(checkpointerMock)) } test("shouldn't update checkpointer when exception occurs during store") { @@ -111,8 +111,8 @@ class KinesisReceiverSuite verify(receiverMock, times(1)).isStopped() verify(receiverMock, times(1)).addRecords(shardId, batch) - verify(receiverMock, never) - .setCheckpointer(anyString, meq(checkpointerMock)) + verify(receiverMock, never).setCheckpointer(anyString, + meq(checkpointerMock)) } test("shutdown should checkpoint if the reason is TERMINATE") { diff --git a/repos/spark/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala b/repos/spark/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala index a5fe70915c4..6511de7d716 100644 --- a/repos/spark/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala +++ b/repos/spark/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala @@ -304,8 +304,11 @@ private[graphx] class EdgePartition[@specialized(Char, } // Finally, release the last accumulated run if (size > 0) { - builder - .add(currSrcId, currDstId, currLocalSrcId, currLocalDstId, currAttr) + builder.add(currSrcId, + currDstId, + currLocalSrcId, + currLocalDstId, + currAttr) } builder.toEdgePartition } @@ -459,8 +462,13 @@ private[graphx] class EdgePartition[@specialized(Char, val dstAttr = if (tripletFields.useDst) vertexAttrs(localDstId) else null.asInstanceOf[VD] - ctx - .set(srcId, dstId, localSrcId, localDstId, srcAttr, dstAttr, data(i)) + ctx.set(srcId, + dstId, + localSrcId, + localDstId, + srcAttr, + dstAttr, + data(i)) sendMsg(ctx) } i += 1 diff --git a/repos/spark/graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala b/repos/spark/graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala index 4a5fd1ed220..fa5e367da39 100644 --- a/repos/spark/graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala +++ b/repos/spark/graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala @@ -161,8 +161,9 @@ class GraphImpl[VD: ClassTag, ED: ClassTag] protected ( f: (PartitionID, Iterator[EdgeTriplet[VD, ED]]) => Iterator[ED2], tripletFields: TripletFields): Graph[VD, ED2] = { vertices.cache() - replicatedVertexView - .upgrade(vertices, tripletFields.useSrc, tripletFields.useDst) + replicatedVertexView.upgrade(vertices, + tripletFields.useSrc, + tripletFields.useDst) val newEdges = replicatedVertexView.edges.mapEdgePartitions { (pid, part) => part.map( @@ -216,8 +217,9 @@ class GraphImpl[VD: ClassTag, ED: ClassTag] protected ( vertices.cache() // For each vertex, replicate its attribute only to partitions where it is // in the relevant position in an edge. 
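// Illustrative aside, not part of the patch: the GraphImpl hunks here show the
// one reshaping this diff applies throughout. A multi-argument call is no
// longer wrapped onto a lone `.method` line; the select stays glued to its
// receiver and the argument list breaks instead. A minimal, self-contained
// sketch of both shapes, using hypothetical names:
object SelectWrapSketch {
  object MathOps {
    def combine(a: Int, b: Int, c: Int, d: Int): Int = a + b + c + d
  }

  // Shape the patch rewrites away from: a dangling single select.
  val before: Int = MathOps
    .combine(1, 2, 3, 4)

  // Shape it produces: the method stays on the receiver, arguments break.
  val after: Int = MathOps.combine(1,
                                   2,
                                   3,
                                   4)
}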
- replicatedVertexView - .upgrade(vertices, tripletFields.useSrc, tripletFields.useDst) + replicatedVertexView.upgrade(vertices, + tripletFields.useSrc, + tripletFields.useDst) val view = activeSetOpt match { case Some((activeSet, _)) => replicatedVertexView.withActiveSet(activeSet) @@ -309,8 +311,9 @@ class GraphImpl[VD: ClassTag, ED: ClassTag] protected ( /** Test whether the closure accesses the attribute with name `attrName`. */ private def accessesVertexAttr(closure: AnyRef, attrName: String): Boolean = { try { - BytecodeUtils - .invokedMethod(closure, classOf[EdgeTriplet[VD, ED]], attrName) + BytecodeUtils.invokedMethod(closure, + classOf[EdgeTriplet[VD, ED]], + attrName) } catch { case _: ClassNotFoundException => true // if we don't know, be conservative diff --git a/repos/spark/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala b/repos/spark/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala index f5b6eff32ad..1e802310b4a 100644 --- a/repos/spark/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala +++ b/repos/spark/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala @@ -28,7 +28,8 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { def starGraph(sc: SparkContext, n: Int): Graph[String, Int] = { Graph.fromEdgeTuples( - sc.parallelize((1 to n).map(x => (0: VertexId, x: VertexId)), 3), "v") + sc.parallelize((1 to n).map(x => (0: VertexId, x: VertexId)), 3), + "v") } test("Graph.fromEdgeTuples") { @@ -40,8 +41,9 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { assert(graph.edges.collect().forall(e => e.attr == 1)) // uniqueEdges option should uniquify edges and store duplicate count in edge attributes - val uniqueGraph = Graph.fromEdgeTuples( - sc.parallelize(doubleRing), 1, Some(RandomVertexCut)) + val uniqueGraph = Graph.fromEdgeTuples(sc.parallelize(doubleRing), + 1, + Some(RandomVertexCut)) assert(uniqueGraph.edges.count() === ring.size) assert(uniqueGraph.edges.collect().forall(e => e.attr == 2)) } @@ -70,9 +72,9 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { assert(graph.vertices.count() === 100) graph.triplets.collect().map { et => assert( - (et.srcId < 10 && et.srcAttr) || (et.srcId >= 10 && !et.srcAttr)) + (et.srcId < 10 && et.srcAttr) || (et.srcId >= 10 && !et.srcAttr)) assert( - (et.dstId < 10 && et.dstAttr) || (et.dstId >= 10 && !et.dstAttr)) + (et.dstId < 10 && et.dstAttr) || (et.dstId >= 10 && !et.dstAttr)) } } } @@ -82,12 +84,12 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { val n = 5 val star = starGraph(sc, n) assert( - star.triplets - .map(et => (et.srcId, et.dstId, et.srcAttr, et.dstAttr)) - .collect() - .toSet === (1 to n) - .map(x => (0: VertexId, x: VertexId, "v", "v")) - .toSet) + star.triplets + .map(et => (et.srcId, et.dstId, et.srcAttr, et.dstAttr)) + .collect() + .toSet === (1 to n) + .map(x => (0: VertexId, x: VertexId, "v", "v")) + .toSet) } } @@ -110,18 +112,19 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { assert(nonemptyParts(mkGraph(edges)).count === 2) } // partitionBy(RandomVertexCut) puts identical edges in the same partition - assert(nonemptyParts(mkGraph(identicalEdges).partitionBy( - RandomVertexCut)).count === 1) + assert(nonemptyParts( + mkGraph(identicalEdges).partitionBy(RandomVertexCut)).count === 1) // partitionBy(EdgePartition1D) puts same-source edges in the same partition assert( - nonemptyParts(mkGraph(sameSrcEdges).partitionBy(EdgePartition1D)).count === 1) + 
nonemptyParts(mkGraph(sameSrcEdges).partitionBy(EdgePartition1D)).count === 1) // partitionBy(CanonicalRandomVertexCut) puts edges that are identical modulo direction into // the same partition - assert(nonemptyParts(mkGraph(canonicalEdges).partitionBy( - CanonicalRandomVertexCut)).count === 1) + assert( + nonemptyParts(mkGraph(canonicalEdges).partitionBy( + CanonicalRandomVertexCut)).count === 1) // partitionBy(EdgePartition2D) puts identical edges in the same partition - assert(nonemptyParts(mkGraph(identicalEdges).partitionBy( - EdgePartition2D)).count === 1) + assert(nonemptyParts( + mkGraph(identicalEdges).partitionBy(EdgePartition2D)).count === 1) // partitionBy(EdgePartition2D) ensures that vertices need only be replicated to 2 * sqrt(p) // partitions @@ -129,12 +132,13 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { val p = 100 val verts = 1 to n val graph = Graph.fromEdgeTuples( - sc.parallelize(verts.flatMap(x => - verts - .withFilter(y => y % x == 0) - .map(y => (x: VertexId, y: VertexId))), - p), - 0) + sc.parallelize(verts.flatMap( + x => + verts + .withFilter(y => y % x == 0) + .map(y => (x: VertexId, y: VertexId))), + p), + 0) assert(graph.edges.partitions.length === p) val partitionedGraph = graph.partitionBy(EdgePartition2D) assert(graph.edges.partitions.length === p) @@ -144,39 +148,43 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { iter => val part = iter.next()._2 Iterator( - (part.iterator.flatMap(e => Iterator(e.srcId, e.dstId))).toSet) + (part.iterator.flatMap(e => Iterator(e.srcId, e.dstId))).toSet) }.collect if (!verts.forall(id => partitionSets.count(_.contains(id)) <= bound)) { val numFailures = verts.count(id => partitionSets.count(_.contains(id)) > bound) val failure = verts.maxBy(id => partitionSets.count(_.contains(id))) fail( - ("Replication bound test failed for %d/%d vertices. " + - "Example: vertex %d replicated to %d (> %f) partitions.") - .format(numFailures, - n, - failure, - partitionSets.count(_.contains(failure)), - bound)) + ("Replication bound test failed for %d/%d vertices. 
" + + "Example: vertex %d replicated to %d (> %f) partitions.").format( + numFailures, + n, + failure, + partitionSets.count(_.contains(failure)), + bound)) } // This should not be true for the default hash partitioning val partitionSetsUnpartitioned = graph.edges.partitionsRDD.mapPartitions { iter => val part = iter.next()._2 Iterator( - (part.iterator.flatMap(e => Iterator(e.srcId, e.dstId))).toSet) + (part.iterator.flatMap(e => Iterator(e.srcId, e.dstId))).toSet) }.collect - assert(verts.exists( - id => partitionSetsUnpartitioned.count(_.contains(id)) > bound)) + assert(verts.exists(id => + partitionSetsUnpartitioned.count(_.contains(id)) > bound)) // Forming triplets view val g = Graph(sc.parallelize(List((0L, "a"), (1L, "b"), (2L, "c"))), sc.parallelize(List(Edge(0L, 1L, 1), Edge(0L, 2L, 1)), 2)) - assert(g.triplets.collect().map(_.toTuple).toSet === Set( - ((0L, "a"), (1L, "b"), 1), ((0L, "a"), (2L, "c"), 1))) + assert( + g.triplets.collect().map(_.toTuple).toSet === Set( + ((0L, "a"), (1L, "b"), 1), + ((0L, "a"), (2L, "c"), 1))) val gPart = g.partitionBy(EdgePartition2D) - assert(gPart.triplets.collect().map(_.toTuple).toSet === Set( - ((0L, "a"), (1L, "b"), 1), ((0L, "a"), (2L, "c"), 1))) + assert( + gPart.triplets.collect().map(_.toTuple).toSet === Set( + ((0L, "a"), (1L, "b"), 1), + ((0L, "a"), (2L, "c"), 1))) } } @@ -186,43 +194,49 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { val star = starGraph(sc, n) // mapVertices preserving type val mappedVAttrs = star.mapVertices((vid, attr) => attr + "2") - assert(mappedVAttrs.vertices.collect().toSet === (0 to n) - .map(x => (x: VertexId, "v2")) - .toSet) + assert( + mappedVAttrs.vertices.collect().toSet === (0 to n) + .map(x => (x: VertexId, "v2")) + .toSet) // mapVertices changing type val mappedVAttrs2 = star.mapVertices((vid, attr) => attr.length) - assert(mappedVAttrs2.vertices.collect().toSet === (0 to n) - .map(x => (x: VertexId, 1)) - .toSet) + assert( + mappedVAttrs2.vertices.collect().toSet === (0 to n) + .map(x => (x: VertexId, 1)) + .toSet) } } test("mapVertices changing type with same erased type") { withSpark { sc => - val vertices = sc.parallelize(Array[(Long, Option[java.lang.Integer])]( - (1L, Some(1)), - (2L, Some(2)), - (3L, Some(3)) - )) - val edges = sc.parallelize(Array( - Edge(1L, 2L, 0), - Edge(2L, 3L, 0), - Edge(3L, 1L, 0) - )) + val vertices = sc.parallelize( + Array[(Long, Option[java.lang.Integer])]( + (1L, Some(1)), + (2L, Some(2)), + (3L, Some(3)) + )) + val edges = sc.parallelize( + Array( + Edge(1L, 2L, 0), + Edge(2L, 3L, 0), + Edge(3L, 1L, 0) + )) val graph0 = Graph(vertices, edges) // Trigger initial vertex replication graph0.triplets.foreach(x => {}) // Change type of replicated vertices, but preserve erased type val graph1 = graph0.mapVertices { case (vid, integerOpt) => - integerOpt.map( - (x: java.lang.Integer) => x.toDouble: java.lang.Double) + integerOpt.map((x: java.lang.Integer) => + x.toDouble: java.lang.Double) } // Access replicated vertices, exposing the erased type val graph2 = graph1.mapTriplets(t => t.srcAttr.get) assert( - graph2.edges.map(_.attr).collect().toSet === Set[java.lang.Double]( - 1.0, 2.0, 3.0)) + graph2.edges.map(_.attr).collect().toSet === Set[java.lang.Double]( + 1.0, + 2.0, + 3.0)) } } @@ -242,11 +256,12 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { withSpark { sc => val n = 5 val star = starGraph(sc, n) - assert(star - .mapTriplets(et => et.srcAttr + et.dstAttr) - .edges - .collect() - .toSet === (1L to n).map(x => Edge(0, x, 
"vv")).toSet) + assert( + star + .mapTriplets(et => et.srcAttr + et.dstAttr) + .edges + .collect() + .toSet === (1L to n).map(x => Edge(0, x, "vv")).toSet) } } @@ -254,9 +269,10 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { withSpark { sc => val n = 5 val star = starGraph(sc, n) - assert(star.reverse.outDegrees.collect().toSet === (1 to n) - .map(x => (x: VertexId, 1)) - .toSet) + assert( + star.reverse.outDegrees.collect().toSet === (1 to n) + .map(x => (x: VertexId, 1)) + .toSet) } } @@ -267,7 +283,9 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { val edges: RDD[Edge[Int]] = sc.parallelize(Array(Edge(1L, 2L, 0))) val graph = Graph(vertices, edges).reverse val result = GraphXUtils.mapReduceTriplets[Int, Int, Int]( - graph, et => Iterator((et.dstId, et.srcAttr)), _ + _) + graph, + et => Iterator((et.dstId, et.srcAttr)), + _ + _) assert(result.collect().toSet === Set((1L, 2))) } } @@ -281,14 +299,16 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { val subgraph = star.subgraph(vpred = (vid, attr) => vid % 2 == 0) // We should have 5 vertices. - assert(subgraph.vertices.collect().toSet === (0 to n by 2) - .map(x => (x, "v")) - .toSet) + assert( + subgraph.vertices.collect().toSet === (0 to n by 2) + .map(x => (x, "v")) + .toSet) // And 4 edges. - assert(subgraph.edges.map(_.copy()).collect().toSet === (2 to n by 2) - .map(x => Edge(0, x, 1)) - .toSet) + assert( + subgraph.edges.map(_.copy()).collect().toSet === (2 to n by 2) + .map(x => Edge(0, x, 1)) + .toSet) } } @@ -301,8 +321,8 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { val subgraph = graph .subgraph( - e => e.dstId != 4L, - (vid, vdata) => vid != 3L + e => e.dstId != 4L, + (vid, vdata) => vid != 3L ) .mapVertices((vid, vdata) => -1) .mapEdges(e => -1) @@ -326,21 +346,22 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { val n = 5 val star = starGraph(sc, n) val doubleStar = Graph.fromEdgeTuples( - sc.parallelize((1 to n).flatMap(x => - List((0: VertexId, x: VertexId), - (0: VertexId, x: VertexId))), - 1), - "v") + sc.parallelize( + (1 to n).flatMap(x => + List((0: VertexId, x: VertexId), (0: VertexId, x: VertexId))), + 1), + "v") val star2 = doubleStar.groupEdges { (a, b) => a } - assert(star2.edges - .collect() - .toArray - .sorted(Edge.lexicographicOrdering[Int]) === star.edges - .collect() - .toArray - .sorted(Edge.lexicographicOrdering[Int])) + assert( + star2.edges + .collect() + .toArray + .sorted(Edge.lexicographicOrdering[Int]) === star.edges + .collect() + .toArray + .sorted(Edge.lexicographicOrdering[Int])) assert(star2.vertices.collect().toSet === star.vertices.collect().toSet) } } @@ -348,20 +369,16 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { test("aggregateMessages") { withSpark { sc => val n = 5 - val agg = starGraph(sc, n).aggregateMessages[String]( - ctx => - { - if (ctx.dstAttr != null) { - throw new Exception( - "expected ctx.dstAttr to be null due to TripletFields, but it was " + - ctx.dstAttr) - } - ctx.sendToDst(ctx.srcAttr) - }, - _ + _, - TripletFields.Src) + val agg = starGraph(sc, n).aggregateMessages[String](ctx => { + if (ctx.dstAttr != null) { + throw new Exception( + "expected ctx.dstAttr to be null due to TripletFields, but it was " + + ctx.dstAttr) + } + ctx.sendToDst(ctx.srcAttr) + }, _ + _, TripletFields.Src) assert( - agg.collect().toSet === (1 to n).map(x => (x: VertexId, "v")).toSet) + agg.collect().toSet === (1 to n).map(x => (x: VertexId, "v")).toSet) } } @@ -377,13 +394,13 @@ 
class GraphSuite extends SparkFunSuite with LocalSparkContext { } val neighborDegreeSums = GraphXUtils .mapReduceTriplets[Int, Int, Int]( - reverseStarDegrees, - et => Iterator((et.srcId, et.dstAttr), (et.dstId, et.srcAttr)), - (a: Int, b: Int) => a + b) + reverseStarDegrees, + et => Iterator((et.srcId, et.dstAttr), (et.dstId, et.srcAttr)), + (a: Int, b: Int) => a + b) .collect() .toSet - assert(neighborDegreeSums === Set((0: VertexId, n)) ++ (1 to n).map( - x => (x: VertexId, 0))) + assert(neighborDegreeSums === Set((0: VertexId, n)) ++ (1 to n).map(x => + (x: VertexId, 0))) // outerJoinVertices preserving type val messages = reverseStar.vertices.mapValues { (vid, attr) => vid.toString @@ -392,9 +409,10 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { (vid, a, bOpt) => a + bOpt.getOrElse("") } - assert(newReverseStar.vertices.map(_._2).collect().toSet === (0 to n) - .map(x => "v%d".format(x)) - .toSet) + assert( + newReverseStar.vertices.map(_._2).collect().toSet === (0 to n) + .map(x => "v%d".format(x)) + .toSet) } } @@ -408,8 +426,9 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { .map(et => (et.srcId, et.dstId, et.srcAttr, et.dstAttr)) .collect() .toSet - assert(triplets === Set((1: VertexId, 2: VertexId, "a", "b"), - (2: VertexId, 1: VertexId, "b", "a"))) + assert( + triplets === Set((1: VertexId, 2: VertexId, "a", "b"), + (2: VertexId, 1: VertexId, "b", "a"))) } } @@ -443,8 +462,11 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { val verts = sc.parallelize(List((1: VertexId, "a"), (2: VertexId, "b")), 1) val edges = sc.parallelize(List(Edge(1, 2, 0), Edge(2, 1, 0)), 2) - val graph = Graph( - verts, edges, "", StorageLevel.MEMORY_ONLY, StorageLevel.MEMORY_ONLY) + val graph = Graph(verts, + edges, + "", + StorageLevel.MEMORY_ONLY, + StorageLevel.MEMORY_ONLY) // Note: Before caching, graph.vertices is cached, but graph.edges is not (but graph.edges' // parent RDD is cached). 
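// Illustrative aside, not part of the patch: the GraphSuite asserts above show
// the companion shape for long chained operands. Rather than wrapping on a
// trailing `.collect()`/`.toSet` select, the whole chain moves under `assert(`
// with a deeper continuation indent. A standalone sketch with hypothetical
// data; plain `==` stands in for ScalaTest's `===`:
object AssertWrapSketch {
  val xs: Seq[Int] = (1 to 5).map(_ * 2)

  // The chained operand sits wholly on its own lines under assert(.
  assert(
    xs
      .map(_ + 1)
      .toSet == (1 to 5).map(_ * 2 + 1).toSet)
}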
graph.cache() @@ -462,11 +484,13 @@ class GraphSuite extends SparkFunSuite with LocalSparkContext { .set("spark.default.parallelism", defaultParallelism.toString) val sc = new SparkContext("local", "test", conf) try { - val edges = sc.parallelize( - (1 to n).map(x => (x: VertexId, 0: VertexId)), numEdgePartitions) + val edges = sc.parallelize((1 to n).map(x => (x: VertexId, 0: VertexId)), + numEdgePartitions) val graph = Graph.fromEdgeTuples(edges, 1) val neighborAttrSums = GraphXUtils.mapReduceTriplets[Int, Int, Int]( - graph, et => Iterator((et.dstId, et.srcAttr)), _ + _) + graph, + et => Iterator((et.dstId, et.srcAttr)), + _ + _) assert(neighborAttrSums.collect().toSet === Set((0: VertexId, n))) } finally { sc.stop() diff --git a/repos/spark/graphx/src/test/scala/org/apache/spark/graphx/util/GraphGeneratorsSuite.scala b/repos/spark/graphx/src/test/scala/org/apache/spark/graphx/util/GraphGeneratorsSuite.scala index f9069be7e0b..cf603fb47f7 100644 --- a/repos/spark/graphx/src/test/scala/org/apache/spark/graphx/util/GraphGeneratorsSuite.scala +++ b/repos/spark/graphx/src/test/scala/org/apache/spark/graphx/util/GraphGeneratorsSuite.scala @@ -46,17 +46,23 @@ class GraphGeneratorsSuite extends SparkFunSuite with LocalSparkContext { GraphGenerators.generateRandomEdges(src, numEdges20, maxVertexId) assert(edges20.length == numEdges20) - val edges10_round1 = GraphGenerators - .generateRandomEdges(src, numEdges10, maxVertexId, seed = 12345) - val edges10_round2 = GraphGenerators - .generateRandomEdges(src, numEdges10, maxVertexId, seed = 12345) + val edges10_round1 = GraphGenerators.generateRandomEdges(src, + numEdges10, + maxVertexId, + seed = 12345) + val edges10_round2 = GraphGenerators.generateRandomEdges(src, + numEdges10, + maxVertexId, + seed = 12345) assert(edges10_round1.zip(edges10_round2).forall { case (e1, e2) => e1.srcId == e2.srcId && e1.dstId == e2.dstId && e1.attr == e2.attr }) - val edges10_round3 = GraphGenerators - .generateRandomEdges(src, numEdges10, maxVertexId, seed = 3467) + val edges10_round3 = GraphGenerators.generateRandomEdges(src, + numEdges10, + maxVertexId, + seed = 3467) assert(!edges10_round1.zip(edges10_round3).forall { case (e1, e2) => e1.srcId == e2.srcId && e1.dstId == e2.dstId && e1.attr == e2.attr @@ -90,8 +96,10 @@ class GraphGeneratorsSuite extends SparkFunSuite with LocalSparkContext { val sigma = 1.3 val numVertices100 = 100 - val graph = GraphGenerators - .logNormalGraph(sc, numVertices100, mu = mu, sigma = sigma) + val graph = GraphGenerators.logNormalGraph(sc, + numVertices100, + mu = mu, + sigma = sigma) assert(graph.vertices.count() == numVertices100) val graph_round1 = GraphGenerators.logNormalGraph(sc, @@ -113,8 +121,11 @@ class GraphGeneratorsSuite extends SparkFunSuite with LocalSparkContext { e1.srcId == e2.srcId && e1.dstId == e2.dstId && e1.attr == e2.attr }) - val graph_round3 = GraphGenerators - .logNormalGraph(sc, numVertices100, mu = mu, sigma = sigma, seed = 567) + val graph_round3 = GraphGenerators.logNormalGraph(sc, + numVertices100, + mu = mu, + sigma = sigma, + seed = 567) val graph_round3_edges = graph_round3.edges.collect() diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/Pipeline.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/Pipeline.scala index 20ab877e228..dafaddd606d 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/Pipeline.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/Pipeline.scala @@ -243,8 +243,10 @@ object Pipeline extends MLReadable[Pipeline] { val 
stageUids = stages.map(_.uid) val jsonParams = List( "stageUids" -> parse(compact(render(stageUids.toSeq)))) - DefaultParamsWriter - .saveMetadata(instance, path, sc, paramMap = Some(jsonParams)) + DefaultParamsWriter.saveMetadata(instance, + path, + sc, + paramMap = Some(jsonParams)) // Save stages val stagesDir = new Path(path, "stages").toString @@ -271,8 +273,10 @@ object Pipeline extends MLReadable[Pipeline] { (metadata.params \ "stageUids").extract[Seq[String]].toArray val stages: Array[PipelineStage] = stageUids.zipWithIndex.map { case (stageUid, idx) => - val stagePath = SharedReadWrite - .getStagePath(stageUid, idx, stageUids.length, stagesDir) + val stagePath = SharedReadWrite.getStagePath(stageUid, + idx, + stageUids.length, + stagesDir) DefaultParamsReader.loadParamsInstance[PipelineStage](stagePath, sc) } (metadata.uid, stages) diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/attribute/AttributeGroup.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/attribute/AttributeGroup.scala index 8759b83bdf4..ccf94bfac04 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/attribute/AttributeGroup.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/attribute/AttributeGroup.scala @@ -144,8 +144,8 @@ class AttributeGroup private (val name: String, nominalMetadata.toArray) } if (binaryMetadata.nonEmpty) { - attrBldr - .putMetadataArray(AttributeType.Binary.name, binaryMetadata.toArray) + attrBldr.putMetadataArray(AttributeType.Binary.name, + binaryMetadata.toArray) } bldr.putMetadata(ATTRIBUTES, attrBldr.build()) bldr.putLong(NUM_ATTRIBUTES, attributes.get.length) diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/Classifier.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/Classifier.scala index cb88ab61db4..2edd90f192a 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/Classifier.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/Classifier.scala @@ -109,8 +109,8 @@ abstract class ClassificationModel[ val predictRawUDF = udf { (features: Any) => predictRaw(features.asInstanceOf[FeaturesType]) } - outputData = outputData - .withColumn(getRawPredictionCol, predictRawUDF(col(getFeaturesCol))) + outputData = outputData.withColumn(getRawPredictionCol, + predictRawUDF(col(getFeaturesCol))) numColsOutput += 1 } if (getPredictionCol != "") { diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala index b43e1b514a2..be08fe74bc6 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala @@ -816,8 +816,8 @@ private[classification] class MultiClassSummarizer extends Serializable { case (key, value) => val (counts: Long, weightSum: Double) = largeMap.distinctMap.getOrElse(key, (0L, 0.0)) - largeMap.distinctMap - .put(key, (counts + value._1, weightSum + value._2)) + largeMap.distinctMap.put(key, + (counts + value._1, weightSum + value._2)) } largeMap.totalInvalidCnt += smallMap.totalInvalidCnt largeMap diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/ProbabilisticClassifier.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/ProbabilisticClassifier.scala index 7e2d58cf890..d4a6516a93d 100644 --- 
a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/ProbabilisticClassifier.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/classification/ProbabilisticClassifier.scala @@ -119,8 +119,8 @@ abstract class ProbabilisticClassificationModel[ val predictRawUDF = udf { (features: Any) => predictRaw(features.asInstanceOf[FeaturesType]) } - outputData = outputData - .withColumn(getRawPredictionCol, predictRawUDF(col(getFeaturesCol))) + outputData = outputData.withColumn(getRawPredictionCol, + predictRawUDF(col(getFeaturesCol))) numColsOutput += 1 } if ($(probabilityCol).nonEmpty) { diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala index 195cd079ba4..91ee61b0f83 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala @@ -274,8 +274,10 @@ private[clustering] trait LDAParams "subsamplingRate", "Fraction of the corpus" + " to be sampled and used in each iteration of mini-batch gradient descent, in range (0, 1].", - ParamValidators - .inRange(0.0, 1.0, lowerInclusive = false, upperInclusive = true)) + ParamValidators.inRange(0.0, + 1.0, + lowerInclusive = false, + upperInclusive = true)) /** @group getParam */ @Since("1.6.0") diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/feature/Word2Vec.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/feature/Word2Vec.scala index a516bfc65b0..11ea4c3a170 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/feature/Word2Vec.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/feature/Word2Vec.scala @@ -106,8 +106,9 @@ private[feature] trait Word2VecBase * Validate and transform the input schema. 
*/ protected def validateAndTransformSchema(schema: StructType): StructType = { - SchemaUtils - .checkColumnType(schema, $(inputCol), new ArrayType(StringType, true)) + SchemaUtils.checkColumnType(schema, + $(inputCol), + new ArrayType(StringType, true)) SchemaUtils.appendColumn(schema, $(outputCol), new VectorUDT) } } diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/regression/DecisionTreeRegressor.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/regression/DecisionTreeRegressor.scala index d46e7fcf2cd..af488e28ad9 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/regression/DecisionTreeRegressor.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/regression/DecisionTreeRegressor.scala @@ -210,8 +210,8 @@ final class DecisionTreeRegressionModel private[ml] ( output.withColumn($(predictionCol), predictUDF(col($(featuresCol)))) } if (isDefined(varianceCol) && $(varianceCol).nonEmpty) { - output = output - .withColumn($(varianceCol), predictVarianceUDF(col($(featuresCol)))) + output = output.withColumn($(varianceCol), + predictVarianceUDF(col($(featuresCol)))) } output } diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/impl/GradientBoostedTrees.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/impl/GradientBoostedTrees.scala index bea870e66a8..28e0e3a3e64 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/impl/GradientBoostedTrees.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/impl/GradientBoostedTrees.scala @@ -47,8 +47,10 @@ private[ml] object GradientBoostedTrees extends Logging { val algo = boostingStrategy.treeStrategy.algo algo match { case OldAlgo.Regression => - GradientBoostedTrees - .boost(input, input, boostingStrategy, validate = false) + GradientBoostedTrees.boost(input, + input, + boostingStrategy, + validate = false) case OldAlgo.Classification => // Map labels to -1, +1 so binary classification can be treated as regression. val remappedInput = @@ -82,8 +84,10 @@ private[ml] object GradientBoostedTrees extends Logging { val algo = boostingStrategy.treeStrategy.algo algo match { case OldAlgo.Regression => - GradientBoostedTrees - .boost(input, validationInput, boostingStrategy, validate = true) + GradientBoostedTrees.boost(input, + validationInput, + boostingStrategy, + validate = true) case OldAlgo.Classification => // Map labels to -1, +1 so binary classification can be treated as regression. val remappedInput = diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/impl/RandomForest.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/impl/RandomForest.scala index 0b3cacee21c..3ce0a81d7fe 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/impl/RandomForest.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/impl/RandomForest.scala @@ -66,8 +66,10 @@ private[ml] object RandomForest extends Logging { timer.start("init") val retaggedInput = input.retag(classOf[LabeledPoint]) - val metadata = DecisionTreeMetadata - .buildMetadata(retaggedInput, strategy, numTrees, featureSubsetStrategy) + val metadata = DecisionTreeMetadata.buildMetadata(retaggedInput, + strategy, + numTrees, + featureSubsetStrategy) logDebug("algo = " + strategy.algo) logDebug("numTrees = " + numTrees) logDebug("seed = " + seed) @@ -168,8 +170,10 @@ private[ml] object RandomForest extends Logging { // Collect some nodes to split, and choose features for each node (if subsampling). 
// Each group of nodes may come from one or multiple trees, and at multiple levels. val (nodesForGroup, treeToNodeToIndexInfo) = - RandomForest - .selectNodesToSplit(nodeQueue, maxMemoryUsage, metadata, rng) + RandomForest.selectNodesToSplit(nodeQueue, + maxMemoryUsage, + metadata, + rng) // Sanity check (should never occur): assert( nodesForGroup.nonEmpty, @@ -283,8 +287,8 @@ private[ml] object RandomForest extends Logging { val featureSplits = splits(featureIndex) var splitIndex = 0 while (splitIndex < numSplits) { - if (featureSplits(splitIndex) - .shouldGoLeft(featureValue, featureSplits)) { + if (featureSplits(splitIndex).shouldGoLeft(featureValue, + featureSplits)) { agg.featureUpdate(leftNodeFeatureOffset, splitIndex, treePoint.label, diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/treeParams.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/treeParams.scala index f57b23a5fdd..3ef341fcbbb 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/treeParams.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tree/treeParams.scala @@ -341,8 +341,10 @@ private[ml] trait TreeEnsembleParams extends DecisionTreeParams { this, "subsamplingRate", "Fraction of the training data used for learning each decision tree, in range (0, 1].", - ParamValidators - .inRange(0, 1, lowerInclusive = false, upperInclusive = true)) + ParamValidators.inRange(0, + 1, + lowerInclusive = false, + upperInclusive = true)) setDefault(subsamplingRate -> 1.0) diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala index a50d57db802..2009af65082 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala @@ -292,8 +292,11 @@ object CrossValidator extends MLReadable[CrossValidator] { instance.numFolds.jsonEncode(instance.getNumFolds)), "estimatorParamMaps" -> parse(estimatorParamMapsJson) ) - DefaultParamsWriter - .saveMetadata(instance, path, sc, extraMetadata, Some(jsonParams)) + DefaultParamsWriter.saveMetadata(instance, + path, + sc, + extraMetadata, + Some(jsonParams)) val evaluatorPath = new Path(path, "evaluator").toString instance.getEvaluator.asInstanceOf[MLWritable].save(evaluatorPath) diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala index 5d426b893b0..e8066b7f86f 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala @@ -106,8 +106,8 @@ private[python] class PythonMLLibAPI extends Serializable { data: JavaRDD[LabeledPoint], initialWeights: Vector): JList[Object] = { try { - val model = learner - .run(data.rdd.persist(StorageLevel.MEMORY_AND_DISK), initialWeights) + val model = learner.run(data.rdd.persist(StorageLevel.MEMORY_AND_DISK), + initialWeights) if (model.isInstanceOf[LogisticRegressionModel]) { val lrModel = model.asInstanceOf[LogisticRegressionModel] List(lrModel.weights, @@ -721,8 +721,8 @@ private[python] class PythonMLLibAPI extends Serializable { minInstancesPerNode = minInstancesPerNode, minInfoGain = minInfoGain) try { - DecisionTree - .train(data.rdd.persist(StorageLevel.MEMORY_AND_DISK), strategy) + 
DecisionTree.train(data.rdd.persist(StorageLevel.MEMORY_AND_DISK), + strategy) } finally { data.rdd.unpersist(blocking = false) } @@ -1114,8 +1114,12 @@ private[python] class PythonMLLibAPI extends Serializable { eps: Double, nparts: Int, intercept: Double): JavaRDD[LabeledPoint] = { - LinearDataGenerator - .generateLinearRDD(sc, nexamples, nfeatures, eps, nparts, intercept) + LinearDataGenerator.generateLinearRDD(sc, + nexamples, + nfeatures, + eps, + nparts, + intercept) } /** diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/clustering/LDAModel.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/clustering/LDAModel.scala index acafcfa3f0c..17e45918413 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/clustering/LDAModel.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/clustering/LDAModel.scala @@ -542,8 +542,11 @@ object LocalLDAModel extends Loader[LocalLDAModel] { val model = (loadedClassName, loadedVersion) match { case (className, "1.0") if className == classNameV1_0 => - SaveLoadV1_0 - .load(sc, path, docConcentration, topicConcentration, gammaShape) + SaveLoadV1_0.load(sc, + path, + docConcentration, + topicConcentration, + gammaShape) case _ => throw new Exception( s"LocalLDAModel.load did not recognize model with (className, format version):" + diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala index 50673114e7d..fb238b849b3 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala @@ -437,8 +437,11 @@ class Word2Vec extends Serializable with Logging { while (i < synAgg.length) { val index = synAgg(i)._1 if (index < vocabSize) { - Array - .copy(synAgg(i)._2, 0, syn0Global, index * vectorSize, vectorSize) + Array.copy(synAgg(i)._2, + 0, + syn0Global, + index * vectorSize, + vectorSize) } else { Array.copy(synAgg(i)._2, 0, @@ -628,8 +631,11 @@ object Word2VecModel extends Loader[Word2VecModel] { val wordVectors = new Array[Float](vectorSize * numWords) var i = 0 while (i < numWords) { - Array - .copy(model(wordList(i)), 0, wordVectors, i * vectorSize, vectorSize) + Array.copy(model(wordList(i)), + 0, + wordVectors, + i * vectorSize, + vectorSize) i += 1 } wordVectors diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/Matrices.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/Matrices.scala index b6be6343725..9c0d3afc3dc 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/Matrices.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/Matrices.scala @@ -196,8 +196,8 @@ private[spark] class MatrixUDT extends UserDefinedType[Matrix] { row.setByte(0, 0) row.setInt(1, sm.numRows) row.setInt(2, sm.numCols) - row - .update(3, new GenericArrayData(sm.colPtrs.map(_.asInstanceOf[Any]))) + row.update(3, + new GenericArrayData(sm.colPtrs.map(_.asInstanceOf[Any]))) row.update( 4, new GenericArrayData(sm.rowIndices.map(_.asInstanceOf[Any]))) @@ -896,8 +896,9 @@ object SparseMatrix { while (entries.size < nnz) { entries += ((rng.nextInt(numRows), rng.nextInt(numCols))) } - SparseMatrix - .fromCOO(numRows, numCols, entries.map(v => (v._1, v._2, 1.0))) + SparseMatrix.fromCOO(numRows, + numCols, + entries.map(v => (v._1, v._2, 1.0))) } else { // selection-rejection method var idx = 0L diff --git 
a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala index 1de2bdede0d..757fca04f69 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrix.scala @@ -495,15 +495,17 @@ class BlockMatrix @Since("1.3.0")( // Each block of A must be multiplied with the corresponding blocks in the columns of B. val flatA = blocks.flatMap { case ((blockRowIndex, blockColIndex), block) => - val destinations = leftDestinations - .getOrElse((blockRowIndex, blockColIndex), Set.empty) + val destinations = + leftDestinations.getOrElse((blockRowIndex, blockColIndex), + Set.empty) destinations.map(j => (j, (blockRowIndex, blockColIndex, block))) } // Each block of B must be multiplied with the corresponding blocks in each row of A. val flatB = other.blocks.flatMap { case ((blockRowIndex, blockColIndex), block) => - val destinations = rightDestinations - .getOrElse((blockRowIndex, blockColIndex), Set.empty) + val destinations = + rightDestinations.getOrElse((blockRowIndex, blockColIndex), + Set.empty) destinations.map(j => (j, (blockRowIndex, blockColIndex, block))) } val newBlocks = flatA diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/RowMatrix.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/RowMatrix.scala index 9a5b3e323f5..459debe95cb 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/RowMatrix.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/linalg/distributed/RowMatrix.scala @@ -289,8 +289,11 @@ class RowMatrix @Since("1.0.0")(@Since("1.0.0") val rows: RDD[Vector], require( k < n, s"k must be smaller than n in dist-eigs mode but got k=$k and n=$n.") - EigenValueDecomposition - .symmetricEigs(multiplyGramianMatrixBy, n, k, tol, maxIter) + EigenValueDecomposition.symmetricEigs(multiplyGramianMatrixBy, + n, + k, + tol, + maxIter) } val sigmas: BDV[Double] = brzSqrt(sigmaSquares) diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala index 0dbc4e23615..5a8bb6f410c 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala @@ -166,8 +166,11 @@ class IsotonicRegressionModel @Since("1.3.0")( @Since("1.4.0") override def save(sc: SparkContext, path: String): Unit = { - IsotonicRegressionModel.SaveLoadV1_0 - .save(sc, path, boundaries, predictions, isotonic) + IsotonicRegressionModel.SaveLoadV1_0.save(sc, + path, + boundaries, + predictions, + isotonic) } override protected def formatVersion: String = "1.0" diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala index 943333c6604..c3fed998e26 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala @@ -51,8 +51,11 @@ class LassoModel @Since("1.1.0")( @Since("1.3.0") override def save(sc: SparkContext, path: String): Unit = { - 
GLMRegressionModel.SaveLoadV1_0 - .save(sc, path, this.getClass.getName, weights, intercept) + GLMRegressionModel.SaveLoadV1_0.save(sc, + path, + this.getClass.getName, + weights, + intercept) } override protected def formatVersion: String = "1.0" @@ -69,8 +72,10 @@ object LassoModel extends Loader[LassoModel] { (loadedClassName, version) match { case (className, "1.0") if className == classNameV1_0 => val numFeatures = RegressionModel.getNumFeatures(metadata) - val data = GLMRegressionModel.SaveLoadV1_0 - .loadData(sc, path, classNameV1_0, numFeatures) + val data = GLMRegressionModel.SaveLoadV1_0.loadData(sc, + path, + classNameV1_0, + numFeatures) new LassoModel(data.weights, data.intercept) case _ => throw new Exception( diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala index e62d0e27329..be22b139c53 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala @@ -51,8 +51,11 @@ class LinearRegressionModel @Since("1.1.0")( @Since("1.3.0") override def save(sc: SparkContext, path: String): Unit = { - GLMRegressionModel.SaveLoadV1_0 - .save(sc, path, this.getClass.getName, weights, intercept) + GLMRegressionModel.SaveLoadV1_0.save(sc, + path, + this.getClass.getName, + weights, + intercept) } override protected def formatVersion: String = "1.0" @@ -70,8 +73,10 @@ object LinearRegressionModel extends Loader[LinearRegressionModel] { (loadedClassName, version) match { case (className, "1.0") if className == classNameV1_0 => val numFeatures = RegressionModel.getNumFeatures(metadata) - val data = GLMRegressionModel.SaveLoadV1_0 - .loadData(sc, path, classNameV1_0, numFeatures) + val data = GLMRegressionModel.SaveLoadV1_0.loadData(sc, + path, + classNameV1_0, + numFeatures) new LinearRegressionModel(data.weights, data.intercept) case _ => throw new Exception( diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala index a35141d6750..d467fff4ea6 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala @@ -51,8 +51,11 @@ class RidgeRegressionModel @Since("1.1.0")( @Since("1.3.0") override def save(sc: SparkContext, path: String): Unit = { - GLMRegressionModel.SaveLoadV1_0 - .save(sc, path, this.getClass.getName, weights, intercept) + GLMRegressionModel.SaveLoadV1_0.save(sc, + path, + this.getClass.getName, + weights, + intercept) } override protected def formatVersion: String = "1.0" @@ -70,8 +73,10 @@ object RidgeRegressionModel extends Loader[RidgeRegressionModel] { (loadedClassName, version) match { case (className, "1.0") if className == classNameV1_0 => val numFeatures = RegressionModel.getNumFeatures(metadata) - val data = GLMRegressionModel.SaveLoadV1_0 - .loadData(sc, path, classNameV1_0, numFeatures) + val data = GLMRegressionModel.SaveLoadV1_0.loadData(sc, + path, + classNameV1_0, + numFeatures) new RidgeRegressionModel(data.weights, data.intercept) case _ => throw new Exception( diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/GradientBoostedTrees.scala 
b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/GradientBoostedTrees.scala index 6c76f34dcb3..3bbbc50a5ab 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/GradientBoostedTrees.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/GradientBoostedTrees.scala @@ -68,8 +68,10 @@ class GradientBoostedTrees @Since("1.2.0")( val algo = boostingStrategy.treeStrategy.algo algo match { case Regression => - GradientBoostedTrees - .boost(input, input, boostingStrategy, validate = false) + GradientBoostedTrees.boost(input, + input, + boostingStrategy, + validate = false) case Classification => // Map labels to -1, +1 so binary classification can be treated as regression. val remappedInput = @@ -110,8 +112,10 @@ class GradientBoostedTrees @Since("1.2.0")( val algo = boostingStrategy.treeStrategy.algo algo match { case Regression => - GradientBoostedTrees - .boost(input, validationInput, boostingStrategy, validate = true) + GradientBoostedTrees.boost(input, + validationInput, + boostingStrategy, + validate = true) case Classification => // Map labels to -1, +1 so binary classification can be treated as regression. val remappedInput = diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/RandomForest.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/RandomForest.scala index 90f0aa46202..757749b5402 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/RandomForest.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/RandomForest.scala @@ -143,8 +143,10 @@ private class RandomForest(private val strategy: Strategy, timer.start("init") val retaggedInput = input.retag(classOf[LabeledPoint]) - val metadata = DecisionTreeMetadata - .buildMetadata(retaggedInput, strategy, numTrees, featureSubsetStrategy) + val metadata = DecisionTreeMetadata.buildMetadata(retaggedInput, + strategy, + numTrees, + featureSubsetStrategy) logDebug("algo = " + strategy.algo) logDebug("numTrees = " + numTrees) logDebug("seed = " + seed) @@ -245,8 +247,10 @@ private class RandomForest(private val strategy: Strategy, // Collect some nodes to split, and choose features for each node (if subsampling). // Each group of nodes may come from one or multiple trees, and at multiple levels. 
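// Illustrative aside, not part of the patch: hunks like `val executorPool =`
// earlier and `val (nodesForGroup, ...)` just below show the fallback shape.
// When the receiver plus the call would still overflow, the line breaks after
// `=` first and the call then keeps its receiver-glued, argument-per-line
// form. A sketch with hypothetical names:
object DefinitionWrapSketch {
  object VeryLongFactoryObjectName {
    def buildSomething(first: Int, second: Int): Int = first + second
  }

  // Break after `=`, then apply the same receiver-glued call shape.
  val aRatherLongResultName: Int =
    VeryLongFactoryObjectName.buildSomething(1,
                                             2)
}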
val (nodesForGroup, treeToNodeToIndexInfo) = - RandomForest - .selectNodesToSplit(nodeQueue, maxMemoryUsage, metadata, rng) + RandomForest.selectNodesToSplit(nodeQueue, + maxMemoryUsage, + metadata, + rng) // Sanity check (should never occur): assert( nodesForGroup.size > 0, @@ -556,8 +560,9 @@ object RandomForest extends Serializable with Logging { RandomForest.aggregateSizeForNode(metadata, featureSubset) * 8L if (memUsage + nodeMemUsage <= maxMemoryUsage) { nodeQueue.dequeue() - mutableNodesForGroup - .getOrElseUpdate(treeIndex, new mutable.ArrayBuffer[Node]()) += node + mutableNodesForGroup.getOrElseUpdate( + treeIndex, + new mutable.ArrayBuffer[Node]()) += node mutableTreeToNodeToIndexInfo.getOrElseUpdate( treeIndex, new mutable.HashMap[Int, NodeIndexInfo]())(node.id) = diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/impl/DTStatsAggregator.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/impl/DTStatsAggregator.scala index 65e7074d1b3..a9171b31486 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/impl/DTStatsAggregator.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/impl/DTStatsAggregator.scala @@ -91,8 +91,8 @@ private[spark] class DTStatsAggregator(val metadata: DecisionTreeMetadata, */ def getImpurityCalculator(featureOffset: Int, binIndex: Int): ImpurityCalculator = { - impurityAggregator - .getCalculator(allStats, featureOffset + binIndex * statsSize) + impurityAggregator.getCalculator(allStats, + featureOffset + binIndex * statsSize) } /** diff --git a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/model/treeEnsembleModels.scala b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/model/treeEnsembleModels.scala index 00aa460b528..1648fd90a8b 100644 --- a/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/model/treeEnsembleModels.scala +++ b/repos/spark/mllib/src/main/scala/org/apache/spark/mllib/tree/model/treeEnsembleModels.scala @@ -68,8 +68,11 @@ class RandomForestModel @Since("1.2.0")( */ @Since("1.3.0") override def save(sc: SparkContext, path: String): Unit = { - TreeEnsembleModel.SaveLoadV1_0 - .save(sc, path, this, RandomForestModel.SaveLoadV1_0.thisClassName) + TreeEnsembleModel.SaveLoadV1_0.save( + sc, + path, + this, + RandomForestModel.SaveLoadV1_0.thisClassName) } override protected def formatVersion: String = diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/feature/Word2VecSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/feature/Word2VecSuite.scala index a356102858b..9beadc294f3 100644 --- a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/feature/Word2VecSuite.scala +++ b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/feature/Word2VecSuite.scala @@ -81,8 +81,9 @@ class Word2VecSuite // These expectations are just magic values, characterizing the current // behavior. 
The test needs to be updated to be more general, see SPARK-11502 - val magicExp = Vectors - .dense(0.30153007534417237, -0.6833061711354689, 0.5116530778733167) + val magicExp = Vectors.dense(0.30153007534417237, + -0.6833061711354689, + 0.5116530778733167) model.transform(docDF).select("result", "expected").collect().foreach { case Row(vector1: Vector, vector2: Vector) => assert(vector1 ~== magicExp absTol 1E-5, diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/optim/IterativelyReweightedLeastSquaresSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/optim/IterativelyReweightedLeastSquaresSuite.scala index c290f10597a..a1619d2ba15 100644 --- a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/optim/IterativelyReweightedLeastSquaresSuite.scala +++ b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/optim/IterativelyReweightedLeastSquaresSuite.scala @@ -102,8 +102,9 @@ class IterativelyReweightedLeastSquaresSuite regParam = 0.0, maxIter = 25, tol = 1e-8).fit(instances1) - val actual = Vectors - .dense(irls.intercept, irls.coefficients(0), irls.coefficients(1)) + val actual = Vectors.dense(irls.intercept, + irls.coefficients(0), + irls.coefficients(1)) assert(actual ~== expected(idx) absTol 1e-4) idx += 1 } @@ -147,8 +148,9 @@ class IterativelyReweightedLeastSquaresSuite regParam = 0.0, maxIter = 25, tol = 1e-8).fit(instances2) - val actual = Vectors - .dense(irls.intercept, irls.coefficients(0), irls.coefficients(1)) + val actual = Vectors.dense(irls.intercept, + irls.coefficients(0), + irls.coefficients(1)) assert(actual ~== expected(idx) absTol 1e-4) idx += 1 } @@ -188,8 +190,9 @@ class IterativelyReweightedLeastSquaresSuite regParam = 0.0, maxIter = 200, tol = 1e-7).fit(instances2) - val actual = Vectors - .dense(irls.intercept, irls.coefficients(0), irls.coefficients(1)) + val actual = Vectors.dense(irls.intercept, + irls.coefficients(0), + irls.coefficients(1)) assert(actual ~== expected(idx) absTol 1e-4) idx += 1 } diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/optim/WeightedLeastSquaresSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/optim/WeightedLeastSquaresSuite.scala index c2cc4688022..9c85b32c7aa 100644 --- a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/optim/WeightedLeastSquaresSuite.scala +++ b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/optim/WeightedLeastSquaresSuite.scala @@ -91,8 +91,9 @@ class WeightedLeastSquaresSuite regParam = 0.0, standardizeFeatures = standardization, standardizeLabel = standardization).fit(instances) - val actual = Vectors - .dense(wls.intercept, wls.coefficients(0), wls.coefficients(1)) + val actual = Vectors.dense(wls.intercept, + wls.coefficients(0), + wls.coefficients(1)) assert(actual ~== expected(idx) absTol 1e-4) } idx += 1 @@ -124,8 +125,9 @@ class WeightedLeastSquaresSuite regParam = 0.0, standardizeFeatures = standardization, standardizeLabel = standardization).fit(instancesConstLabel) - val actual = Vectors - .dense(wls.intercept, wls.coefficients(0), wls.coefficients(1)) + val actual = Vectors.dense(wls.intercept, + wls.coefficients(0), + wls.coefficients(1)) assert(actual ~== expected(idx) absTol 1e-4) } idx += 1 diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala index cd622b67b1c..72e77cfb36e 100644 --- 
a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala +++ b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/regression/GeneralizedLinearRegressionSuite.scala @@ -344,8 +344,9 @@ class GeneralizedLinearRegressionSuite .setLink(link) .setFitIntercept(fitIntercept) val model = trainer.fit(dataset) - val actual = Vectors - .dense(model.intercept, model.coefficients(0), model.coefficients(1)) + val actual = Vectors.dense(model.intercept, + model.coefficients(0), + model.coefficients(1)) assert(actual ~= expected(idx) absTol 1e-4, "Model mismatch: GLM with gaussian family, " + s"$link link and fitIntercept = $fitIntercept.") @@ -409,8 +410,9 @@ class GeneralizedLinearRegressionSuite .setFitIntercept(fitIntercept) .setRegParam(regParam) val model = trainer.fit(datasetGaussianIdentity) - val actual = Vectors - .dense(model.intercept, model.coefficients(0), model.coefficients(1)) + val actual = Vectors.dense(model.intercept, + model.coefficients(0), + model.coefficients(1)) assert(actual ~= expected(idx) absTol 1e-4, "Model mismatch: GLM with gaussian family, " + s"fitIntercept = $fitIntercept and regParam = $regParam.") @@ -452,14 +454,23 @@ class GeneralizedLinearRegressionSuite */ val expected = Seq(Vectors.dense(0.0, -0.3560284, 1.3010002, -0.3570805, -0.7406762), - Vectors - .dense(2.8367406, -0.5896187, 0.8931655, -0.3925169, -0.7996989), + Vectors.dense(2.8367406, + -0.5896187, + 0.8931655, + -0.3925169, + -0.7996989), Vectors.dense(0.0, -0.2134390, 0.7800646, -0.2144267, -0.4438358), - Vectors - .dense(1.6995366, -0.3524694, 0.5332651, -0.2352985, -0.4780850), + Vectors.dense(1.6995366, + -0.3524694, + 0.5332651, + -0.2352985, + -0.4780850), Vectors.dense(0.0, -0.2832198, 0.8434144, -0.2524727, -0.5293452), - Vectors - .dense(1.5063590, -0.4038015, 0.6133664, -0.2687882, -0.5541758)) + Vectors.dense(1.5063590, + -0.4038015, + 0.6133664, + -0.2687882, + -0.5541758)) import GeneralizedLinearRegression._ @@ -555,8 +566,9 @@ class GeneralizedLinearRegressionSuite .setLink(link) .setFitIntercept(fitIntercept) val model = trainer.fit(dataset) - val actual = Vectors - .dense(model.intercept, model.coefficients(0), model.coefficients(1)) + val actual = Vectors.dense(model.intercept, + model.coefficients(0), + model.coefficients(1)) assert(actual ~= expected(idx) absTol 1e-4, "Model mismatch: GLM with poisson family, " + s"$link link and fitIntercept = $fitIntercept.") @@ -634,8 +646,9 @@ class GeneralizedLinearRegressionSuite .setLink(link) .setFitIntercept(fitIntercept) val model = trainer.fit(dataset) - val actual = Vectors - .dense(model.intercept, model.coefficients(0), model.coefficients(1)) + val actual = Vectors.dense(model.intercept, + model.coefficients(0), + model.coefficients(1)) assert(actual ~= expected(idx) absTol 1e-4, "Model mismatch: GLM with gamma family, " + s"$link link and fitIntercept = $fitIntercept.") diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/tuning/ParamGridBuilderSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/tuning/ParamGridBuilderSuite.scala index 27747b336b4..9fc93b6b900 100644 --- a/repos/spark/mllib/src/test/scala/org/apache/spark/ml/tuning/ParamGridBuilderSuite.scala +++ b/repos/spark/mllib/src/test/scala/org/apache/spark/ml/tuning/ParamGridBuilderSuite.scala @@ -51,8 +51,10 @@ class ParamGridBuilderSuite extends SparkFunSuite { .addGrid(maxIter, Array(10, 20)) .addGrid(inputCol, Array("input0", "input1")) .build() - val expected1 = mutable - .Set((10, "input0"), (20, 
"input0"), (10, "input1"), (20, "input1")) + val expected1 = mutable.Set((10, "input0"), + (20, "input0"), + (10, "input1"), + (20, "input1")) validateGrid(maps1, expected1) } } diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/classification/NaiveBayesSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/classification/NaiveBayesSuite.scala index ca8f2c409fb..e71abadb7dc 100644 --- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/classification/NaiveBayesSuite.scala +++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/classification/NaiveBayesSuite.scala @@ -151,16 +151,22 @@ class NaiveBayesSuite extends SparkFunSuite with MLlibTestSparkContext { Array(0.10, 0.10, 0.70, 0.10) // label 2 ).map(_.map(math.log)) - val testData = NaiveBayesSuite - .generateNaiveBayesInput(pi, theta, nPoints, 42, Multinomial) + val testData = NaiveBayesSuite.generateNaiveBayesInput(pi, + theta, + nPoints, + 42, + Multinomial) val testRDD = sc.parallelize(testData, 2) testRDD.cache() val model = NaiveBayes.train(testRDD, 1.0, Multinomial) validateModelFit(pi, theta, model) - val validationData = NaiveBayesSuite - .generateNaiveBayesInput(pi, theta, nPoints, 17, Multinomial) + val validationData = NaiveBayesSuite.generateNaiveBayesInput(pi, + theta, + nPoints, + 17, + Multinomial) val validationRDD = sc.parallelize(validationData, 2) // Test prediction on RDD. @@ -242,16 +248,22 @@ class NaiveBayesSuite extends SparkFunSuite with MLlibTestSparkContext { 0.30) // label 2 ).map(_.map(math.log)) - val testData = NaiveBayesSuite - .generateNaiveBayesInput(pi, theta, nPoints, 45, Bernoulli) + val testData = NaiveBayesSuite.generateNaiveBayesInput(pi, + theta, + nPoints, + 45, + Bernoulli) val testRDD = sc.parallelize(testData, 2) testRDD.cache() val model = NaiveBayes.train(testRDD, 1.0, Bernoulli) validateModelFit(pi, theta, model) - val validationData = NaiveBayesSuite - .generateNaiveBayesInput(pi, theta, nPoints, 20, Bernoulli) + val validationData = NaiveBayesSuite.generateNaiveBayesInput(pi, + theta, + nPoints, + 20, + Bernoulli) val validationRDD = sc.parallelize(validationData, 2) // Test prediction on RDD. 
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/classification/StreamingLogisticRegressionSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/classification/StreamingLogisticRegressionSuite.scala
index 2db78a7e662..d983e488d96 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/classification/StreamingLogisticRegressionSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/classification/StreamingLogisticRegressionSuite.scala
@@ -57,8 +57,10 @@ class StreamingLogisticRegressionSuite
     // generate sequence of simulated data
     val numBatches = 20
     val input = (0 until numBatches).map { i =>
-      LogisticRegressionSuite
-        .generateLogisticInput(0.0, B, nPoints, 42 * (i + 1))
+      LogisticRegressionSuite.generateLogisticInput(0.0,
+                                                    B,
+                                                    nPoints,
+                                                    42 * (i + 1))
     }
 
     // apply model training to input stream
@@ -87,8 +89,10 @@ class StreamingLogisticRegressionSuite
     // generate sequence of simulated data
     val numBatches = 20
     val input = (0 until numBatches).map { i =>
-      LogisticRegressionSuite
-        .generateLogisticInput(0.0, B, nPoints, 42 * (i + 1))
+      LogisticRegressionSuite.generateLogisticInput(0.0,
+                                                    B,
+                                                    nPoints,
+                                                    42 * (i + 1))
     }
 
     // create buffer to store intermediate fits
@@ -127,8 +131,10 @@ class StreamingLogisticRegressionSuite
     // generate sequence of simulated data for testing
     val numBatches = 10
     val testInput = (0 until numBatches).map { i =>
-      LogisticRegressionSuite
-        .generateLogisticInput(0.0, B, nPoints, 42 * (i + 1))
+      LogisticRegressionSuite.generateLogisticInput(0.0,
+                                                    B,
+                                                    nPoints,
+                                                    42 * (i + 1))
     }
 
     // apply model predictions to test stream
@@ -159,8 +165,10 @@ class StreamingLogisticRegressionSuite
     val numBatches = 10
     val nPoints = 100
     val testInput = (0 until numBatches).map { i =>
-      LogisticRegressionSuite
-        .generateLogisticInput(0.0, 5.0, nPoints, 42 * (i + 1))
+      LogisticRegressionSuite.generateLogisticInput(0.0,
+                                                    5.0,
+                                                    nPoints,
+                                                    42 * (i + 1))
     }
 
     // train and predict
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala
index fddcf57060c..e2afb8faef7 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/linalg/MatricesSuite.scala
@@ -82,8 +82,11 @@ class MatricesSuite extends SparkFunSuite {
   }
 
   test("index in matrices incorrect input") {
-    val sm = Matrices
-      .sparse(3, 2, Array(0, 2, 3), Array(1, 2, 1), Array(0.0, 1.0, 2.0))
+    val sm = Matrices.sparse(3,
+                             2,
+                             Array(0, 2, 3),
+                             Array(1, 2, 1),
+                             Array(0.0, 1.0, 2.0))
     val dm = Matrices.dense(3, 2, Array(0.0, 2.3, 1.4, 3.2, 1.0, 9.1))
     Array(sm, dm).foreach { mat =>
       intercept[IllegalArgumentException] { mat.index(4, 1) }
@@ -452,8 +455,16 @@ class MatricesSuite extends SparkFunSuite {
   test("sprand") {
     val rng = mock[Random]
     when(rng.nextInt(4)).thenReturn(0, 1, 1, 3, 2, 2, 0, 1, 3, 0)
-    when(rng.nextDouble())
-      .thenReturn(1.0, 2.0, 3.0, 4.0, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0)
+    when(rng.nextDouble()).thenReturn(1.0,
+                                      2.0,
+                                      3.0,
+                                      4.0,
+                                      0.5,
+                                      0.6,
+                                      0.7,
+                                      0.8,
+                                      0.9,
+                                      1.0)
     val mat = SparseMatrix.sprand(4, 4, 0.25, rng)
     assert(mat.numRows === 4)
     assert(mat.numCols === 4)
@@ -511,8 +522,11 @@ class MatricesSuite extends SparkFunSuite {
     assert(dm1.numNonzeros === 3)
     assert(dm1.numActives === 6)
 
-    val sm1 = Matrices
-      .sparse(3, 2, Array(0, 2, 3), Array(0, 2, 1), Array(0.0, -1.2, 0.0))
+    val sm1 = Matrices.sparse(3,
+                              2,
+                              Array(0, 2, 3),
+                              Array(0, 2, 1),
+                              Array(0.0, -1.2, 0.0))
     assert(sm1.numNonzeros === 1)
     assert(sm1.numActives === 3)
   }
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/linalg/VectorsSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/linalg/VectorsSuite.scala
index 5ca1f9e1996..ed539c46977 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/linalg/VectorsSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/linalg/VectorsSuite.scala
@@ -321,8 +321,9 @@ class VectorsSuite extends SparkFunSuite with Logging {
 
   test("vector p-norm") {
     val dv = Vectors.dense(0.0, -1.2, 3.1, 0.0, -4.5, 1.9)
-    val sv = Vectors
-      .sparse(6, Seq((1, -1.2), (2, 3.1), (3, 0.0), (4, -4.5), (5, 1.9)))
+    val sv =
+      Vectors.sparse(6,
+                     Seq((1, -1.2), (2, 3.1), (3, 0.0), (4, -4.5), (5, 1.9)))
 
     assert(
       Vectors.norm(dv, 1.0) ~==
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDsSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDsSuite.scala
index be9be92edb8..82301dd0c12 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDsSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/random/RandomRDDsSuite.scala
@@ -161,8 +161,11 @@ class RandomRDDsSuite
                      math.sqrt(poissonMean),
                      0.1)
 
-    val exponential = RandomRDDs
-      .exponentialRDD(sc, exponentialMean, size, numPartitions, seed)
+    val exponential = RandomRDDs.exponentialRDD(sc,
+                                                exponentialMean,
+                                                size,
+                                                numPartitions,
+                                                seed)
     testGeneratedRDD(exponential,
                      size,
                      numPartitions,
@@ -170,8 +173,12 @@ class RandomRDDsSuite
                      exponentialMean,
                      0.1)
 
-    val gamma = RandomRDDs
-      .gammaRDD(sc, gammaShape, gammaScale, size, numPartitions, seed)
+    val gamma = RandomRDDs.gammaRDD(sc,
+                                    gammaShape,
+                                    gammaScale,
+                                    size,
+                                    numPartitions,
+                                    seed)
     testGeneratedRDD(gamma, size, numPartitions, gammaMean, gammaStd, 0.1)
   }
 
@@ -230,8 +237,12 @@ class RandomRDDsSuite
                            math.sqrt(poissonMean),
                            0.1)
 
-    val exponential = RandomRDDs
-      .exponentialVectorRDD(sc, exponentialMean, rows, cols, parts, seed)
+    val exponential = RandomRDDs.exponentialVectorRDD(sc,
+                                                      exponentialMean,
+                                                      rows,
+                                                      cols,
+                                                      parts,
+                                                      seed)
     testGeneratedVectorRDD(exponential,
                            rows,
                            cols,
@@ -240,8 +251,13 @@ class RandomRDDsSuite
                            exponentialMean,
                            0.1)
 
-    val gamma = RandomRDDs
-      .gammaVectorRDD(sc, gammaShape, gammaScale, rows, cols, parts, seed)
+    val gamma = RandomRDDs.gammaVectorRDD(sc,
+                                          gammaShape,
+                                          gammaScale,
+                                          rows,
+                                          cols,
+                                          parts,
+                                          seed)
     testGeneratedVectorRDD(gamma,
                            rows,
                            cols,
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala
index 4b699d57bbd..d79b41e8bb6 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala
@@ -50,8 +50,10 @@ class LinearRegressionSuite extends SparkFunSuite with MLlibTestSparkContext {
   // Test if we can correctly learn Y = 3 + 10*X1 + 10*X2
   test("linear regression") {
     val testRDD = sc
-      .parallelize(LinearDataGenerator
-                     .generateLinearInput(3.0, Array(10.0, 10.0), 100, 42),
+      .parallelize(LinearDataGenerator.generateLinearInput(3.0,
+                                                           Array(10.0, 10.0),
+                                                           100,
+                                                           42),
                    2)
       .cache()
     val linReg = new LinearRegressionWithSGD().setIntercept(true)
@@ -81,8 +83,10 @@ class LinearRegressionSuite extends SparkFunSuite with MLlibTestSparkContext {
   // Test if we can correctly learn Y = 10*X1 + 10*X2
   test("linear regression without intercept") {
     val testRDD = sc
-      .parallelize(LinearDataGenerator
-                     .generateLinearInput(0.0, Array(10.0, 10.0), 100, 42),
+      .parallelize(LinearDataGenerator.generateLinearInput(0.0,
+                                                           Array(10.0, 10.0),
+                                                           100,
+                                                           42),
                    2)
       .cache()
     val linReg = new LinearRegressionWithSGD().setIntercept(false)
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala
index 9fbb1197670..3cdb9635bd4 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala
@@ -61,8 +61,11 @@ class RidgeRegressionSuite extends SparkFunSuite with MLlibTestSparkContext {
     val w = Array.fill(numFeatures)(random.nextDouble() - 0.5)
 
     // Use half of data for training and other half for validation
-    val data = LinearDataGenerator
-      .generateLinearInput(3.0, w, 2 * numExamples, 42, 10.0)
+    val data = LinearDataGenerator.generateLinearInput(3.0,
+                                                       w,
+                                                       2 * numExamples,
+                                                       42,
+                                                       10.0)
     val testData = data.take(numExamples)
     val validationData = data.takeRight(numExamples)
 
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/StreamingLinearRegressionSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/StreamingLinearRegressionSuite.scala
index f282b75e209..e2f16b130ad 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/StreamingLinearRegressionSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/regression/StreamingLinearRegressionSuite.scala
@@ -68,8 +68,10 @@ class StreamingLinearRegressionSuite extends SparkFunSuite with TestSuiteBase {
     // generate sequence of simulated data
     val numBatches = 10
     val input = (0 until numBatches).map { i =>
-      LinearDataGenerator
-        .generateLinearInput(0.0, Array(10.0, 10.0), 100, 42 * (i + 1))
+      LinearDataGenerator.generateLinearInput(0.0,
+                                              Array(10.0, 10.0),
+                                              100,
+                                              42 * (i + 1))
     }
 
     // apply model training to input stream
@@ -103,8 +105,10 @@ class StreamingLinearRegressionSuite extends SparkFunSuite with TestSuiteBase {
     // generate sequence of simulated data
     val numBatches = 10
     val input = (0 until numBatches).map { i =>
-      LinearDataGenerator
-        .generateLinearInput(0.0, Array(10.0), 100, 42 * (i + 1))
+      LinearDataGenerator.generateLinearInput(0.0,
+                                              Array(10.0),
+                                              100,
+                                              42 * (i + 1))
     }
 
     // create buffer to store intermediate fits
@@ -140,8 +144,10 @@ class StreamingLinearRegressionSuite extends SparkFunSuite with TestSuiteBase {
     val numBatches = 10
     val nPoints = 100
     val testInput = (0 until numBatches).map { i =>
-      LinearDataGenerator
-        .generateLinearInput(0.0, Array(10.0, 10.0), nPoints, 42 * (i + 1))
+      LinearDataGenerator.generateLinearInput(0.0,
+                                              Array(10.0, 10.0),
+                                              nPoints,
+                                              42 * (i + 1))
     }
 
     // apply model predictions to test stream
@@ -170,8 +176,10 @@ class StreamingLinearRegressionSuite extends SparkFunSuite with TestSuiteBase {
     val numBatches = 10
     val nPoints = 100
     val testInput = (0 until numBatches).map { i =>
-      LinearDataGenerator
-        .generateLinearInput(0.0, Array(10.0, 10.0), nPoints, 42 * (i + 1))
+      LinearDataGenerator.generateLinearInput(0.0,
+                                              Array(10.0, 10.0),
+                                              nPoints,
+                                              42 * (i + 1))
     }
 
     // train and predict
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
index b6b5fa64462..7743450d187 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
@@ -130,8 +130,9 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
       0,
       0)
     val featureSamples = Array.fill(200000)(math.random)
-    val splits = DecisionTree
-      .findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
+    val splits = DecisionTree.findSplitsForContinuousFeature(featureSamples,
+                                                             fakeMetadata,
+                                                             0)
     assert(splits.length === 5)
     assert(fakeMetadata.numSplits(0) === 5)
     assert(fakeMetadata.numBins(0) === 6)
@@ -158,8 +159,9 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
       0)
     val featureSamples =
       Array(1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3).map(_.toDouble)
-    val splits = DecisionTree
-      .findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
+    val splits = DecisionTree.findSplitsForContinuousFeature(featureSamples,
+                                                             fakeMetadata,
+                                                             0)
     assert(splits.length === 3)
     // check returned splits are distinct
     assert(splits.distinct.length === splits.length)
@@ -183,8 +185,9 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
       0)
     val featureSamples =
       Array(2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 4, 5).map(_.toDouble)
-    val splits = DecisionTree
-      .findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
+    val splits = DecisionTree.findSplitsForContinuousFeature(featureSamples,
+                                                             fakeMetadata,
+                                                             0)
     assert(splits.length === 2)
     assert(splits(0) === 2.0)
     assert(splits(1) === 3.0)
@@ -208,8 +211,9 @@ class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
       0)
     val featureSamples =
       Array(0, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2).map(_.toDouble)
-    val splits = DecisionTree
-      .findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
+    val splits = DecisionTree.findSplitsForContinuousFeature(featureSamples,
+                                                             fakeMetadata,
+                                                             0)
     assert(splits.length === 1)
     assert(splits(0) === 1.0)
   }
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/RandomForestSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/RandomForestSuite.scala
index d7411e333a0..7b59d28bebf 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/RandomForestSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/RandomForestSuite.scala
@@ -149,8 +149,10 @@ class RandomForestSuite extends SparkFunSuite with MLlibTestSparkContext {
       numFeaturesPerNode: Int): Unit = {
     val seeds = Array(123, 5354, 230, 349867, 23987)
     val maxMemoryUsage: Long = 128 * 1024L * 1024L
-    val metadata = DecisionTreeMetadata
-      .buildMetadata(rdd, strategy, numTrees, featureSubsetStrategy)
+    val metadata = DecisionTreeMetadata.buildMetadata(rdd,
+                                                      strategy,
+                                                      numTrees,
+                                                      featureSubsetStrategy)
 
     seeds.foreach { seed =>
       val failString = s"Failed on test with:" +
@@ -167,8 +169,10 @@ class RandomForestSuite extends SparkFunSuite with MLlibTestSparkContext {
 
           nodesForGroup: Map[Int, Array[Node]],
           treeToNodeToIndexInfo: Map[Int, Map[Int, RandomForest.NodeIndexInfo]]) =
-        RandomForest
-          .selectNodesToSplit(nodeQueue, maxMemoryUsage, metadata, rng)
+        RandomForest.selectNodesToSplit(nodeQueue,
+                                        maxMemoryUsage,
+                                        metadata,
+                                        rng)
       assert(nodesForGroup.size === numTrees, failString)
       assert(nodesForGroup.values.forall(_.size == 1), failString) // 1 node per tree
 
diff --git a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/impl/BaggedPointSuite.scala b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/impl/BaggedPointSuite.scala
index ef19f23e121..110d944c707 100644
--- a/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/impl/BaggedPointSuite.scala
+++ b/repos/spark/mllib/src/test/scala/org/apache/spark/mllib/tree/impl/BaggedPointSuite.scala
@@ -66,8 +66,11 @@ class BaggedPointSuite extends SparkFunSuite with MLlibTestSparkContext {
     val arr = EnsembleTestHelper.generateOrderedLabeledPoints(1, 1000)
     val rdd = sc.parallelize(arr)
     seeds.foreach { seed =>
-      val baggedRDD = BaggedPoint
-        .convertToBaggedRDD(rdd, subsample, numSubsamples, true, seed)
+      val baggedRDD = BaggedPoint.convertToBaggedRDD(rdd,
+                                                     subsample,
+                                                     numSubsamples,
+                                                     true,
+                                                     seed)
       val subsampleCounts: Array[Array[Double]] =
         baggedRDD.map(_.subsampleWeights).collect()
       EnsembleTestHelper.testRandomArrays(subsampleCounts,
@@ -110,8 +113,11 @@ class BaggedPointSuite extends SparkFunSuite with MLlibTestSparkContext {
     val arr = EnsembleTestHelper.generateOrderedLabeledPoints(1, 1000)
     val rdd = sc.parallelize(arr)
     seeds.foreach { seed =>
-      val baggedRDD = BaggedPoint
-        .convertToBaggedRDD(rdd, subsample, numSubsamples, false, seed)
+      val baggedRDD = BaggedPoint.convertToBaggedRDD(rdd,
+                                                     subsample,
+                                                     numSubsamples,
+                                                     false,
+                                                     seed)
       val subsampleCounts: Array[Array[Double]] =
         baggedRDD.map(_.subsampleWeights).collect()
       EnsembleTestHelper.testRandomArrays(subsampleCounts,
diff --git a/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala b/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
index 132d5de486d..d97215e1d49 100644
--- a/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
+++ b/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala
@@ -31,8 +31,9 @@ private[repl] trait SparkILoopInit { self: SparkILoop =>
          /_/
 """.format(SPARK_VERSION))
     import Properties._
-    val welcomeMsg = "Using Scala %s (%s, Java %s)"
-      .format(versionString, javaVmName, javaVersion)
+    val welcomeMsg = "Using Scala %s (%s, Java %s)".format(versionString,
+                                                           javaVmName,
+                                                           javaVersion)
     echo(welcomeMsg)
     echo("Type in expressions to have them evaluated.")
     echo("Type :help for more information.")
diff --git a/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala b/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala
index ccd6396dd26..fd157e924f0 100644
--- a/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala
+++ b/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -938,8 +938,10 @@ class SparkIMain(initialSettings: Settings,
 
           IR.Error
       case Right(_) =>
-        val line = "%sval %s = %s.value"
-          .format(modifiers map (_ + " ") mkString, name, bindRep.evalPath)
+        val line = "%sval %s = %s.value".format(
+          modifiers map (_ + " ") mkString,
+          name,
+          bindRep.evalPath)
         logDebug("Interpreting: " + line)
         interpret(line)
     }
diff --git a/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala b/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
index 796f1df35a2..62274a9e1d1 100644
--- a/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
+++ b/repos/spark/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
@@ -161,8 +161,9 @@ private[repl] trait SparkMemberHandlers {
     override def resultExtractionCode(req: Request) = {
       val lhsType = string2code(req lookupTypeOf name)
       val res = string2code(req fullPath name)
-      """ + "%s: %s = " + %s + "\n" """
-        .format(string2code(lhs.toString), lhsType, res) + "\n"
+      """ + "%s: %s = " + %s + "\n" """.format(string2code(lhs.toString),
+                                               lhsType,
+                                               res) + "\n"
     }
   }
 
diff --git a/repos/spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repos/spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 7f388641bfa..8378d83148b 100644
--- a/repos/spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repos/spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -69,8 +69,9 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
    /___/ .__/\_,_/_/ /_/\_\   version %s
       /_/
 """.format(SPARK_VERSION))
-    val welcomeMsg = "Using Scala %s (%s, Java %s)"
-      .format(versionString, javaVmName, javaVersion)
+    val welcomeMsg = "Using Scala %s (%s, Java %s)".format(versionString,
+                                                           javaVmName,
+                                                           javaVersion)
     echo(welcomeMsg)
     echo("Type in expressions to have them evaluated.")
     echo("Type :help for more information.")
diff --git a/repos/spark/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala b/repos/spark/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
index a3de4cba31a..04dac0ad069 100644
--- a/repos/spark/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
+++ b/repos/spark/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
@@ -129,8 +129,9 @@ class ExecutorClassLoader(conf: SparkConf,
     val url =
       if (SparkEnv.get.securityManager.isAuthenticationEnabled()) {
         val uri = new URI(classUri + "/" + urlEncode(pathInDirectory))
-        val newuri = Utils
-          .constructURIForAuthentication(uri, SparkEnv.get.securityManager)
+        val newuri = Utils.constructURIForAuthentication(
+          uri,
+          SparkEnv.get.securityManager)
         newuri.toURL
       } else {
         new URL(classUri + "/" + urlEncode(pathInDirectory))
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
index 19797a0ccff..3d286e7afce 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoder.scala
@@ -292,8 +292,10 @@ object Encoders {
       e2: Encoder[T2],
       e3: Encoder[T3],
       e4: Encoder[T4]): Encoder[(T1, T2, T3, T4)] = {
-    ExpressionEncoder
-      .tuple(encoderFor(e1), encoderFor(e2), encoderFor(e3), encoderFor(e4))
+    ExpressionEncoder.tuple(encoderFor(e1),
+                            encoderFor(e2),
+                            encoderFor(e3),
+                            encoderFor(e4))
   }
 
   /**
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index c0e67677b74..30d8471393c 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -516,11 +516,11 @@ object ScalaReflection extends ScalaReflection {
           }
 
           val unwrapped = UnwrapOption(optionObjectType, inputObject)
-          expressions.If(
-            IsNull(unwrapped),
-            expressions.Literal
-              .create(null, silentSchemaFor(optType).dataType),
-            extractorFor(unwrapped, optType, newPath))
+          expressions.If(IsNull(unwrapped),
+                         expressions.Literal.create(
+                           null,
+                           silentSchemaFor(optType).dataType),
+                         extractorFor(unwrapped, optType, newPath))
         }
 
       case t if t <:< localTypeOf[Product] =>
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala
index 2f978208377..bf8729846f6 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala
@@ -96,15 +96,15 @@ object DecimalPrecision extends Rule[LogicalPlan] {
 
       case Add(e1 @ DecimalType.Expression(p1, s1),
                e2 @ DecimalType.Expression(p2, s2)) =>
-        val dt = DecimalType
-          .bounded(max(s1, s2) + max(p1 - s1, p2 - s2) + 1, max(s1, s2))
+        val dt = DecimalType.bounded(max(s1, s2) + max(p1 - s1, p2 - s2) + 1,
+                                     max(s1, s2))
         CheckOverflow(Add(promotePrecision(e1, dt), promotePrecision(e2, dt)),
                       dt)
 
       case Subtract(e1 @ DecimalType.Expression(p1, s1),
                     e2 @ DecimalType.Expression(p2, s2)) =>
-        val dt = DecimalType
-          .bounded(max(s1, s2) + max(p1 - s1, p2 - s2) + 1, max(s1, s2))
+        val dt = DecimalType.bounded(max(s1, s2) + max(p1 - s1, p2 - s2) + 1,
+                                     max(s1, s2))
         CheckOverflow(
           Subtract(promotePrecision(e1, dt), promotePrecision(e2, dt)),
           dt)
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index eaa22ae0079..1aed85cc423 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -281,8 +281,10 @@ class SessionCatalog(externalCatalog: ExternalCatalog) {
       parts: Seq[CatalogTablePartition],
       ignoreIfExists: Boolean): Unit = {
     val db = tableName.database.getOrElse(currentDb)
-    externalCatalog
-      .createPartitions(db, tableName.table, parts, ignoreIfExists)
+    externalCatalog.createPartitions(db,
+                                     tableName.table,
+                                     parts,
+                                     ignoreIfExists)
   }
 
   /**
@@ -293,8 +295,10 @@ class SessionCatalog(externalCatalog: ExternalCatalog) {
       parts: Seq[TablePartitionSpec],
       ignoreIfNotExists: Boolean): Unit = {
     val db = tableName.database.getOrElse(currentDb)
-    externalCatalog
-      .dropPartitions(db, tableName.table, parts, ignoreIfNotExists)
+    externalCatalog.dropPartitions(db,
+                                   tableName.table,
+                                   parts,
+                                   ignoreIfNotExists)
   }
 
   /**
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
index 1c94a023dc4..2188de2d86b 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
@@ -32,8 +32,8 @@ case class CreateArray(children: Seq[Expression]) extends Expression {
   override def foldable: Boolean = children.forall(_.foldable)
 
   override def checkInputDataTypes(): TypeCheckResult =
-    TypeUtils
-      .checkForSameTypeInputExpr(children.map(_.dataType), "function array")
+    TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType),
+                                        "function array")
 
   override def dataType: DataType = {
     ArrayType(children.headOption.map(_.dataType).getOrElse(NullType),
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
index 86bfaa0d0e8..c4d95a37a83 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
@@ -212,8 +212,9 @@ case class GetArrayStructFields(child: Expression,
           if ($row.isNullAt($ordinal)) {
             $values[$j] = null;
           } else {
-            $values[$j] = ${ctx
-              .getValue(row, field.dataType, ordinal.toString)};
+            $values[$j] = ${ctx.getValue(row,
+                                         field.dataType,
+                                         ordinal.toString)};
           }
         }
       }
@@ -342,8 +343,8 @@ case class GetMapValue(child: Expression, key: Expression)
       int $index = 0;
       boolean $found = false;
       while ($index < $length && !$found) {
-        final ${ctx.javaType(keyType)} $key = ${ctx
-          .getValue(keys, keyType, index)};
+        final ${ctx
+          .javaType(keyType)} $key = ${ctx.getValue(keys, keyType, index)};
         if (${ctx.genEqual(keyType, key, eval2)}) {
           $found = true;
         } else {
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 6af22f777b8..0d3fb30bcbe 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -799,8 +799,8 @@ case class AddMonths(startDate: Expression, numMonths: Expression)
   override def dataType: DataType = DateType
 
   override def nullSafeEval(start: Any, months: Any): Any = {
-    DateTimeUtils
-      .dateAddMonths(start.asInstanceOf[Int], months.asInstanceOf[Int])
+    DateTimeUtils.dateAddMonths(start.asInstanceOf[Int],
+                                months.asInstanceOf[Int])
   }
 
   override def genCode(ctx: CodegenContext, ev: ExprCode): String = {
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
index 77a123f401b..6e94dba51e1 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala
@@ -297,8 +297,10 @@ case class Murmur3Hash(children: Seq[Expression], seed: Int)
       case c: CalendarInterval =>
         Murmur3_x86_32.hashInt(c.months, hashLong(c.microseconds))
       case a: Array[Byte] =>
-        Murmur3_x86_32
-          .hashUnsafeBytes(a, Platform.BYTE_ARRAY_OFFSET, a.length, seed)
+        Murmur3_x86_32.hashUnsafeBytes(a,
+                                       Platform.BYTE_ARRAY_OFFSET,
+                                       a.length,
+                                       seed)
       case s: UTF8String =>
         Murmur3_x86_32.hashUnsafeBytes(s.getBaseObject,
                                        s.getBaseOffset,
@@ -379,8 +381,10 @@ case class Murmur3Hash(children: Seq[Expression], seed: Int)
 
         ctx.nullSafeExec(nullable, s"$input.isNullAt($index)") {
           s"""
-            final ${ctx.javaType(elementType)} $element = ${ctx
-            .getValue(input, elementType, index)};
+            final ${ctx.javaType(elementType)} $element = ${ctx.getValue(
+            input,
+            elementType,
+            index)};
             ${computeHash(element, elementType, result, ctx)}
           """
         }
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
index 011b27baa1f..8bcf47f37f4 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
@@ -315,8 +315,9 @@ abstract class QueryPlan[PlanType <: QueryPlan[PlanType]]
         val cleanedExprId = Alias(a.child, a.name)(ExprId(-1),
                                                    a.qualifiers,
                                                    isGenerated = a.isGenerated)
-        BindReferences
-          .bindReference(cleanedExprId, allAttributes, allowFailures = true)
+        BindReferences.bindReference(cleanedExprId,
+                                     allAttributes,
+                                     allowFailures = true)
       case other =>
         BindReferences.bindReference(other, allAttributes, allowFailures = true)
   }
diff --git a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index 91ef529ff8b..e148a8a1f08 100644
--- a/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/repos/spark/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -544,15 +544,17 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
         _.generateTreeString(depth + 2,
                              lastChildren :+ false :+ false,
                              builder))
-      innerChildren.last
-        .generateTreeString(depth + 2, lastChildren :+ false :+ true, builder)
+      innerChildren.last.generateTreeString(depth + 2,
+                                            lastChildren :+ false :+ true,
+                                            builder)
     }
 
     if (treeChildren.nonEmpty) {
       treeChildren.init.foreach(
         _.generateTreeString(depth + 1, lastChildren :+ false, builder))
-      treeChildren.last
-        .generateTreeString(depth + 1, lastChildren :+ true, builder)
+      treeChildren.last.generateTreeString(depth + 1,
+                                           lastChildren :+ true,
+                                           builder)
     }
 
     builder
diff --git a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
index be0daa76871..91dcc7ea231 100644
--- a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
+++ b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
@@ -33,10 +33,10 @@ trait AnalysisTest extends PlanTest {
     val caseSensitiveCatalog = new SimpleCatalog(caseSensitiveConf)
     val caseInsensitiveCatalog = new SimpleCatalog(caseInsensitiveConf)
 
-    caseSensitiveCatalog
-      .registerTable(TableIdentifier("TaBlE"), TestRelations.testRelation)
-    caseInsensitiveCatalog
-      .registerTable(TableIdentifier("TaBlE"), TestRelations.testRelation)
+    caseSensitiveCatalog.registerTable(TableIdentifier("TaBlE"),
+                                       TestRelations.testRelation)
+    caseInsensitiveCatalog.registerTable(TableIdentifier("TaBlE"),
+                                         TestRelations.testRelation)
 
     new Analyzer(caseSensitiveCatalog,
                  EmptyFunctionRegistry,
diff --git a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
index 571a04606bc..aa58ed2a9b4 100644
--- a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
+++ b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
@@ -257,8 +257,9 @@ abstract class CatalogTestCases extends SparkFunSuite with BeforeAndAfterEach {
   test("basic create and list partitions") {
     val catalog = newEmptyCatalog()
     catalog.createDatabase(newDb("mydb"), ignoreIfExists = false)
-    catalog
-      .createTable("mydb", newTable("tbl", "mydb"), ignoreIfExists = false)
+    catalog.createTable("mydb",
+                        newTable("tbl", "mydb"),
+                        ignoreIfExists = false)
     catalog.createPartitions("mydb",
                              "tbl",
                              Seq(part1, part2),
@@ -285,8 +286,10 @@ abstract class CatalogTestCases extends SparkFunSuite with BeforeAndAfterEach {
   test("create partitions that already exist") {
     val catalog = newBasicCatalog()
     intercept[AnalysisException] {
-      catalog
-        .createPartitions("db2", "tbl2", Seq(part1), ignoreIfExists = false)
+      catalog.createPartitions("db2",
+                               "tbl2",
+                               Seq(part1),
+                               ignoreIfExists = false)
     }
     catalog.createPartitions("db2", "tbl2", Seq(part1), ignoreIfExists = true)
   }
@@ -333,8 +336,10 @@ abstract class CatalogTestCases extends SparkFunSuite with BeforeAndAfterEach {
                              Seq(part3.spec),
                              ignoreIfNotExists = false)
     }
-    catalog
-      .dropPartitions("db2", "tbl2", Seq(part3.spec), ignoreIfNotExists = true)
+    catalog.dropPartitions("db2",
+                           "tbl2",
+                           Seq(part3.spec),
+                           ignoreIfNotExists = true)
   }
 
   test("get partition") {
@@ -361,8 +366,10 @@ abstract class CatalogTestCases extends SparkFunSuite with BeforeAndAfterEach {
     val newPart1 = part1.copy(spec = Map("a" -> "100", "b" -> "101"))
     val newPart2 = part2.copy(spec = Map("a" -> "200", "b" -> "201"))
     val newSpecs = Seq(newPart1.spec, newPart2.spec)
-    catalog
-      .renamePartitions("db2", "tbl2", Seq(part1.spec, part2.spec), newSpecs)
+    catalog.renamePartitions("db2",
+                             "tbl2",
+                             Seq(part1.spec, part2.spec),
+                             newSpecs)
     assert(
       catalog
         .getPartition("db2", "tbl2", newPart1.spec)
diff --git a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 7a601a4b6a3..53986a85f99 100644
--- a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -119,8 +119,9 @@ class SessionCatalogSuite extends SparkFunSuite {
     // When cascade is true, it should drop them
     val externalCatalog3 = newBasicCatalog()
     val sessionCatalog3 = new SessionCatalog(externalCatalog3)
-    externalCatalog3
-      .dropDatabase("db2", ignoreIfNotExists = false, cascade = true)
+    externalCatalog3.dropDatabase("db2",
+                                  ignoreIfNotExists = false,
+                                  cascade = true)
     assert(sessionCatalog3.listDatabases().toSet == Set("default", "db1"))
   }
 
diff --git a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
index 1b0415f5017..2a0e1087566 100644
--- a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
+++ b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
@@ -147,8 +147,9 @@ class EncoderResolutionSuite extends PlanTest {
       val structType = new StructType()
         .add("a", StringType)
         .add("b", DecimalType.SYSTEM_DEFAULT)
-      ExpressionEncoder[ComplexClass]
-        .resolve(Seq('a.long, 'b.struct(structType)), null)
+      ExpressionEncoder[ComplexClass].resolve(Seq('a.long,
+                                                  'b.struct(structType)),
+                                              null)
     }.message
     assert(msg2 == s"""
        |Cannot up cast `b`.`b` from decimal(38,18) to bigint as it may truncate
from decimal(38,18) to bigint as it may truncate diff --git a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratedProjectionSuite.scala b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratedProjectionSuite.scala index d1a12080c09..584983648a5 100644 --- a/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratedProjectionSuite.scala +++ b/repos/spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratedProjectionSuite.scala @@ -96,13 +96,13 @@ class GeneratedProjectionSuite extends SparkFunSuite { val unsafeRow: UnsafeRow = unsafeProj(row) assert(java.util.Arrays.equals(unsafeRow.getBinary(0), Array[Byte](1, 2))) assert( - java.util.Arrays - .equals(unsafeRow.getArray(1).getBinary(0), Array[Byte](1, 2))) + java.util.Arrays.equals(unsafeRow.getArray(1).getBinary(0), + Array[Byte](1, 2))) assert(unsafeRow.getArray(1).isNullAt(1)) assert(unsafeRow.getArray(1).getBinary(1) === null) assert( - java.util.Arrays - .equals(unsafeRow.getArray(1).getBinary(2), Array[Byte](3, 4))) + java.util.Arrays.equals(unsafeRow.getArray(1).getBinary(2), + Array[Byte](3, 4))) val safeProj = FromUnsafeProjection(fields) val row2 = safeProj(unsafeRow) diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala index 3269fb192d7..31444506ffc 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala @@ -138,8 +138,8 @@ class DataFrameReader private[sql] (sqlContext: SQLContext) extends Logging { userSpecifiedSchema = userSpecifiedSchema, className = source, options = extraOptions.toMap) - Dataset - .newDataFrame(sqlContext, LogicalRelation(dataSource.resolveRelation())) + Dataset.newDataFrame(sqlContext, + LogicalRelation(dataSource.resolveRelation())) } /** @@ -185,8 +185,8 @@ class DataFrameReader private[sql] (sqlContext: SQLContext) extends Logging { userSpecifiedSchema = userSpecifiedSchema, className = source, options = extraOptions.toMap) - Dataset - .newDataFrame(sqlContext, StreamingRelation(dataSource.createSource())) + Dataset.newDataFrame(sqlContext, + StreamingRelation(dataSource.createSource())) } /** diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala index 339f00da4ce..0ec7bbeb326 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala @@ -666,8 +666,8 @@ class Dataset[T] private[sql] ( // resolved and become AttributeReference. 
val cond = plan.condition.map { _.transform { - case catalyst.expressions - .EqualTo(a: AttributeReference, b: AttributeReference) + case catalyst.expressions.EqualTo(a: AttributeReference, + b: AttributeReference) if a.sameRef(b) => catalyst.expressions.EqualTo(withPlan(plan.left).resolve(a.name), withPlan(plan.right).resolve(b.name)) @@ -720,8 +720,9 @@ class Dataset[T] private[sql] ( case _ => Alias(CreateStruct(rightOutput), "_2")() } - implicit val tuple2Encoder: Encoder[(T, U)] = ExpressionEncoder - .tuple(this.unresolvedTEncoder, other.unresolvedTEncoder) + implicit val tuple2Encoder: Encoder[(T, U)] = ExpressionEncoder.tuple( + this.unresolvedTEncoder, + other.unresolvedTEncoder) withTypedPlan[(T, U)](other, encoderFor[(T, U)]) { (left, right) => Project(leftData :: rightData :: Nil, joined.analyzed) } @@ -2393,8 +2394,9 @@ class Dataset[T] private[sql] ( val start = System.nanoTime() val result = action(df) val end = System.nanoTime() - sqlContext.listenerManager - .onSuccess(name, df.queryExecution, end - start) + sqlContext.listenerManager.onSuccess(name, + df.queryExecution, + end - start) result } catch { case e: Exception => @@ -2412,8 +2414,9 @@ class Dataset[T] private[sql] ( val start = System.nanoTime() val result = action(ds) val end = System.nanoTime() - sqlContext.listenerManager - .onSuccess(name, ds.queryExecution, end - start) + sqlContext.listenerManager.onSuccess(name, + ds.queryExecution, + end - start) result } catch { case e: Exception => diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/CoGroupedIterator.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/CoGroupedIterator.scala index 1b85c403993..f42769f8f7e 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/CoGroupedIterator.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/CoGroupedIterator.scala @@ -36,8 +36,9 @@ class CoGroupedIterator(left: Iterator[(InternalRow, Iterator[InternalRow])], extends Iterator[ (InternalRow, Iterator[InternalRow], Iterator[InternalRow])] { - private val keyOrdering = GenerateOrdering - .generate(groupingSchema.map(SortOrder(_, Ascending)), groupingSchema) + private val keyOrdering = GenerateOrdering.generate( + groupingSchema.map(SortOrder(_, Ascending)), + groupingSchema) private var currentLeftData: (InternalRow, Iterator[InternalRow]) = _ private var currentRightData: (InternalRow, Iterator[InternalRow]) = _ diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala index 250cc0777a9..0a1e5478429 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala @@ -365,8 +365,9 @@ abstract class SparkPlan inputSchema: Seq[Attribute], useSubexprElimination: Boolean = false): () => MutableProjection = { log.debug(s"Creating MutableProj: $expressions, inputSchema: $inputSchema") - GenerateMutableProjection - .generate(expressions, inputSchema, useSubexprElimination) + GenerateMutableProjection.generate(expressions, + inputSchema, + useSubexprElimination) } protected def newPredicate( diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala index 71e99ac488b..b1e90cc8eea 100644 --- 
a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala @@ -477,10 +477,10 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] { execution.joins .CartesianProduct(planLater(left), planLater(right)) :: Nil case logical.Join(left, right, Inner, Some(condition)) => - execution.Filter( - condition, - execution.joins - .CartesianProduct(planLater(left), planLater(right))) :: Nil + execution.Filter(condition, + execution.joins.CartesianProduct( + planLater(left), + planLater(right))) :: Nil case _ => Nil } } diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/Window.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/Window.scala index 4fd8ba60c4e..24ddd868920 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/Window.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/Window.scala @@ -305,8 +305,8 @@ case class Window(windowExpression: Seq[NamedExpression], val unboundToRefMap = expressions.zip(references).toMap val patchedWindowExpression = windowExpression.map(_.transform(unboundToRefMap)) - UnsafeProjection - .create(child.output ++ patchedWindowExpression, child.output) + UnsafeProjection.create(child.output ++ patchedWindowExpression, + child.output) } protected override def doExecute(): RDD[InternalRow] = { @@ -550,8 +550,9 @@ private[execution] class ExternalRowBuffer(sorter: UnsafeExternalSorter, def next(): InternalRow = { if (iter.hasNext) { iter.loadNext() - currentRow - .pointTo(iter.getBaseObject, iter.getBaseOffset, iter.getRecordLength) + currentRow.pointTo(iter.getBaseObject, + iter.getBaseOffset, + iter.getRecordLength) currentRow } else { null diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/AggregationIterator.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/AggregationIterator.scala index def0ccf8ac3..52e11b2b99f 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/AggregationIterator.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/AggregationIterator.scala @@ -234,8 +234,9 @@ abstract class AggregationIterator( newMutableProjection(evalExpressions, bufferAttributes)() expressionAggEvalProjection.target(aggregateResult) - val resultProjection = UnsafeProjection - .create(resultExpressions, groupingAttributes ++ aggregateAttributes) + val resultProjection = UnsafeProjection.create( + resultExpressions, + groupingAttributes ++ aggregateAttributes) (currentGroupingKey: UnsafeRow, currentBuffer: MutableRow) => { diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregate.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregate.scala index 7bfe43246ab..83b4596a8a8 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregate.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregate.scala @@ -471,8 +471,9 @@ case class TungstenAggregate( hashMapTerm, s"$hashMapTerm = $thisPlan.createHashMap();") sorterTerm = ctx.freshName("sorter") - ctx - .addMutableState(classOf[UnsafeKVExternalSorter].getName, sorterTerm, "") + ctx.addMutableState(classOf[UnsafeKVExternalSorter].getName, + 
sorterTerm, + "") // Create a name for iterator from HashMap val iterTerm = ctx.freshName("mapIter") diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregationIterator.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregationIterator.scala index 8c2a4aad3af..33da540a745 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregationIterator.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/TungstenAggregationIterator.scala @@ -149,8 +149,8 @@ class TungstenAggregationIterator( (currentGroupingKey: UnsafeRow, currentBuffer: MutableRow) => { - unsafeRowJoiner - .join(currentGroupingKey, currentBuffer.asInstanceOf[UnsafeRow]) + unsafeRowJoiner.join(currentGroupingKey, + currentBuffer.asInstanceOf[UnsafeRow]) } } else { super.generateResultProjection() diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala index 8b6ac480fc8..248b7ef8f62 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarTableScan.scala @@ -381,8 +381,9 @@ private[sql] case class InMemoryColumnarTableScan( case other => other }.toArray val columnarIterator = GenerateColumnAccessor.generate(columnTypes) - columnarIterator - .initialize(withMetrics, columnTypes, requestedColumnIndices.toArray) + columnarIterator.initialize(withMetrics, + columnTypes, + requestedColumnIndices.toArray) if (enableAccumulators && columnarIterator.hasNext) { readPartitions += 1 } diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala index b726de1dcc8..16068d92229 100644 --- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala +++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala @@ -472,8 +472,9 @@ private[sql] object DataSourceStrategy extends Strategy with Logging { val mutableJoinedRow = new JoinedRow() val unsafePartitionValues = UnsafeProjection.create(partitionColumnSchema)(partitionValues) - val unsafeProjection = UnsafeProjection - .create(requiredColumns, dataColumns ++ partitionColumns) + val unsafeProjection = UnsafeProjection.create( + requiredColumns, + dataColumns ++ partitionColumns) // If we are returning batches directly, we need to augment them with the partitioning // columns. We want to do this without a row by row operation. 
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelation.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelation.scala
index 0fcac6ef9c5..073802b641c 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelation.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelation.scala
@@ -155,8 +155,8 @@ private[sql] case class InsertIntoHadoopFsRelation(
       writerContainer.driverSideSetup()

       try {
-        sqlContext.sparkContext
-          .runJob(queryExecution.toRdd, writerContainer.writeRows _)
+        sqlContext.sparkContext.runJob(queryExecution.toRdd,
+                                       writerContainer.writeRows _)
         writerContainer.commitJob()
         refreshFunction()
       } catch {
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/WriterContainer.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/WriterContainer.scala
index dc0478c5ceb..4a34ca4ba46 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/WriterContainer.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/WriterContainer.scala
@@ -145,8 +145,10 @@ private[sql] abstract class BaseWriterContainer(
   protected def newOutputWriter(path: String,
                                 bucketId: Option[Int] = None): OutputWriter = {
     try {
-      outputWriterFactory
-        .newInstance(path, bucketId, dataSchema, taskAttemptContext)
+      outputWriterFactory.newInstance(path,
+                                      bucketId,
+                                      dataSchema,
+                                      taskAttemptContext)
     } catch {
       case e: org.apache.hadoop.fs.FileAlreadyExistsException =>
         if (outputCommitter
@@ -427,8 +429,9 @@ private[sql] class DynamicPartitionWriterContainer(
     val getOutputRow = UnsafeProjection.create(dataColumns, inputSchema)

     // Returns the partition path given a partition key.
-    val getPartitionString = UnsafeProjection
-      .create(Concat(partitionStringExpression) :: Nil, partitionColumns)
+    val getPartitionString = UnsafeProjection.create(
+      Concat(partitionStringExpression) :: Nil,
+      partitionColumns)

     // Sorts the data before write, so that we only need one writer at the same time.
     // TODO: inject a local sort operator in planning.
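All of these rewrites flow from the single formatter switch named in the commit subject. A minimal sketch of the corresponding configuration (assuming scalafmt's HOCON .scalafmt.conf format; whatever other settings the actual run used are unknown):

    # .scalafmt.conf -- sketch, only the switch exercised by this patch
    penalizeSingleSelectMultiArgList = true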
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/DefaultSource.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/DefaultSource.scala
index d1ff32fda54..106686689a3 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/DefaultSource.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/DefaultSource.scala
@@ -36,8 +36,9 @@ class DefaultSource extends RelationProvider with DataSourceRegister {
       parameters: Map[String, String]): BaseRelation = {
     val url =
       parameters.getOrElse("url", sys.error("Option 'url' not specified"))
-    val table = parameters
-      .getOrElse("dbtable", sys.error("Option 'dbtable' not specified"))
+    val table = parameters.getOrElse(
+      "dbtable",
+      sys.error("Option 'dbtable' not specified"))
     val partitionColumn = parameters.getOrElse("partitionColumn", null)
     val lowerBound = parameters.getOrElse("lowerBound", null)
     val upperBound = parameters.getOrElse("upperBound", null)
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JSONOptions.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JSONOptions.scala
index 0a0d15b698f..0ac80557c00 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JSONOptions.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JSONOptions.scala
@@ -76,8 +76,8 @@ private[sql] class JSONOptions(
     factory.configure(JsonParser.Feature.ALLOW_COMMENTS, allowComments)
     factory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES,
                       allowUnquotedFieldNames)
-    factory
-      .configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, allowSingleQuotes)
+    factory.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES,
+                      allowSingleQuotes)
     factory.configure(JsonParser.Feature.ALLOW_NUMERIC_LEADING_ZEROS,
                       allowNumericLeadingZeros)
     factory.configure(JsonParser.Feature.ALLOW_NON_NUMERIC_NUMBERS,
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/CatalystReadSupport.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/CatalystReadSupport.scala
index 58c1fa663f7..1d9e3c48d1c 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/CatalystReadSupport.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/CatalystReadSupport.scala
@@ -66,8 +66,9 @@ private[parquet] class CatalystReadSupport
         StructType.fromString(schemaString)
       }

-    val parquetRequestedSchema = CatalystReadSupport
-      .clipParquetSchema(context.getFileSchema, catalystRequestedSchema)
+    val parquetRequestedSchema = CatalystReadSupport.clipParquetSchema(
+      context.getFileSchema,
+      catalystRequestedSchema)

     new ReadContext(parquetRequestedSchema, Map.empty[String, String].asJava)
   }
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/DirectParquetOutputCommitter.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/DirectParquetOutputCommitter.scala
index 48aded59f5f..bb1ba6ebcd3 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/DirectParquetOutputCommitter.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/DirectParquetOutputCommitter.scala
@@ -66,11 +66,12 @@ private[datasources] class DirectParquetOutputCommitter(
     if (configuration.getBoolean(ParquetOutputFormat.ENABLE_JOB_SUMMARY, true)) {
       try {
         val outputStatus = fileSystem.getFileStatus(outputPath)
-        val footers = ParquetFileReader
-          .readAllFootersInParallel(configuration, outputStatus)
+        val footers = ParquetFileReader.readAllFootersInParallel(configuration,
+                                                                 outputStatus)
         try {
-          ParquetFileWriter
-            .writeMetadataFile(configuration, outputPath, footers)
+          ParquetFileWriter.writeMetadataFile(configuration,
+                                              outputPath,
+                                              footers)
         } catch {
           case e: Exception =>
             LOG.warn("could not write summary file for " + outputPath, e)
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala
index 411de404492..0382b50f335 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRelation.scala
@@ -119,8 +119,8 @@ private[sql] class DefaultSource
     // bundled with `ParquetOutputFormat[Row]`.
     job.setOutputFormatClass(classOf[ParquetOutputFormat[Row]])

-    ParquetOutputFormat
-      .setWriteSupportClass(job, classOf[CatalystWriteSupport])
+    ParquetOutputFormat.setWriteSupportClass(job,
+                                             classOf[CatalystWriteSupport])

     // We want to clear this temporary metadata from saving into Parquet file.
     // This metadata is only useful for detecting optional columns when pushdowning filters.
@@ -303,8 +303,9 @@ private[sql] class DefaultSource
     val inputFiles = splitFiles(allFiles).data.toArray

     // Create the function to set input paths at the driver side.
-    val setInputPaths = ParquetRelation
-      .initializeDriverSideJobFunc(inputFiles, parquetBlockSize) _
+    val setInputPaths = ParquetRelation.initializeDriverSideJobFunc(
+      inputFiles,
+      parquetBlockSize) _

     Utils.withDummyCallSite(sqlContext.sparkContext) {
       new SqlNewHadoopRDD(sqlContext = sqlContext,
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchange.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchange.scala
index 894b70adba6..d85f0168e70 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchange.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchange.scala
@@ -252,8 +252,9 @@ object ShuffleExchange {
           position
         }
       case h: HashPartitioning =>
-        val projection = UnsafeProjection
-          .create(h.partitionIdExpression :: Nil, outputAttributes)
+        val projection = UnsafeProjection.create(
+          h.partitionIdExpression :: Nil,
+          outputAttributes)
         row => projection(row).getInt(0)
       case RangePartitioning(_, _) | SinglePartition => identity
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/CartesianProduct.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/CartesianProduct.scala
index af70edade19..ef7ec6d5598 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/CartesianProduct.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/CartesianProduct.scala
@@ -52,8 +52,10 @@ private[spark] class UnsafeCartesianRDD(left: RDD[UnsafeRow],
     val partition = split.asInstanceOf[CartesianPartition]
     for (y <- rdd2.iterator(partition.s2, context)) {
-      sorter
-        .insertRecord(y.getBaseObject, y.getBaseOffset, y.getSizeInBytes, 0)
+      sorter.insertRecord(y.getBaseObject,
+                          y.getBaseOffset,
+                          y.getSizeInBytes,
+                          0)
     }

     // Create an iterator from sorter and wrapper it as Iterator[UnsafeRow]
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
index 0b634c05f98..a58b54a5fec 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
@@ -572,8 +572,9 @@ private[joins] final class LongArrayRelation(
     val idx = (key - start).toInt
     if (idx >= 0 && idx < sizes.length && sizes(idx) > 0) {
       val result = new UnsafeRow(numFields)
-      result
-        .pointTo(bytes, Platform.BYTE_ARRAY_OFFSET + offsets(idx), sizes(idx))
+      result.pointTo(bytes,
+                     Platform.BYTE_ARRAY_OFFSET + offsets(idx),
+                     sizes(idx))
       result
     } else {
       null
diff --git a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
index 290c8279a69..ef48559fd11 100644
--- a/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
+++ b/repos/spark/sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala
@@ -615,8 +615,9 @@ class HDFSFileCatalog(val sqlContext: SQLContext,
   private def listLeafFiles(
       paths: Seq[Path]): mutable.LinkedHashSet[FileStatus] = {
     if (paths.length >= sqlContext.conf.parallelPartitionDiscoveryThreshold) {
-      HadoopFsRelation
-        .listLeafFilesInParallel(paths, hadoopConf, sqlContext.sparkContext)
+      HadoopFsRelation.listLeafFilesInParallel(paths,
+                                               hadoopConf,
+                                               sqlContext.sparkContext)
     } else {
       val statuses = paths.flatMap { path =>
         val fs = path.getFileSystem(hadoopConf)
@@ -669,13 +670,11 @@ class HDFSFileCatalog(val sqlContext: SQLContext,
             part.copy(values = castPartitionValuesToUserSchema(part.values))
           })
       case _ =>
-        PartitioningUtils
-          .parsePartitions(
-            leafDirs,
-            PartitioningUtils.DEFAULT_PARTITION_NAME,
-            typeInference =
-              sqlContext.conf.partitionColumnTypeInferenceEnabled(),
-            basePaths = basePaths)
+        PartitioningUtils.parsePartitions(
+          leafDirs,
+          PartitioningUtils.DEFAULT_PARTITION_NAME,
+          typeInference = sqlContext.conf.partitionColumnTypeInferenceEnabled(),
+          basePaths = basePaths)
     }
   }
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
index 9ded298c60d..8a2307a34b0 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
@@ -247,15 +247,15 @@ class ColumnExpressionSuite extends QueryTest with SharedSQLContext {

   test("isNaN") {
     val testData =
-      sqlContext
-        .createDataFrame(
-          sparkContext.parallelize(
+      sqlContext.createDataFrame(
+        sparkContext
+          .parallelize(
             Row(Double.NaN, Float.NaN) :: Row(math.log(-1), math.log(-3).toFloat) :: Row(
               null,
               null) :: Row(Double.MaxValue, Float.MinValue) :: Nil),
-        StructType(
-          Seq(StructField("a", DoubleType), StructField("b", FloatType))))
+        StructType(
+          Seq(StructField("a", DoubleType), StructField("b", FloatType))))

     checkAnswer(testData.select($"a".isNaN, $"b".isNaN),
                 Row(true, true) :: Row(true, true) :: Row(false, false) :: Row(
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
index 54d84f5cb4f..84dd0ce3e75 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
@@ -54,8 +54,11 @@ class DataFrameAggregateSuite extends QueryTest with SharedSQLContext {
       Row(1, 3) :: Row(2, 3) :: Row(3, 3) :: Nil
     )

-    val df1 = Seq(("a", 1, 0, "b"), ("b", 2, 4, "c"), ("a", 2, 3, "d"))
-      .toDF("key", "value1", "value2", "rest")
+    val df1 =
+      Seq(("a", 1, 0, "b"), ("b", 2, 4, "c"), ("a", 2, 3, "d")).toDF("key",
+                                                                     "value1",
+                                                                     "value2",
+                                                                     "rest")

     checkAnswer(
       df1.groupBy("key").min(),
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index 8f6ec2de0a3..e52e14ccc42 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -415,8 +415,10 @@ class DateFunctionsSuite extends QueryTest with SharedSQLContext {
     val ss1 = "2015-07-24 10:00:00"
     val ss2 = "2015-07-25 02:02:02"
     val fmt = "yyyy/MM/dd HH:mm:ss.S"
-    val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2))
-      .toDF("d", "ts", "s", "ss")
+    val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2)).toDF("d",
+                                                                    "ts",
+                                                                    "s",
+                                                                    "ss")
     checkAnswer(df.select(unix_timestamp(col("ts"))),
                 Seq(Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
     checkAnswer(df.select(unix_timestamp(col("ss"))),
@@ -449,8 +451,10 @@ class DateFunctionsSuite extends QueryTest with SharedSQLContext {
     val ss1 = "2015-07-24 10:00:00"
     val ss2 = "2015-07-25 02:02:02"
     val fmt = "yyyy/MM/dd HH:mm:ss.S"
-    val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2))
-      .toDF("d", "ts", "s", "ss")
+    val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2)).toDF("d",
+                                                                    "ts",
+                                                                    "s",
+                                                                    "ss")
     checkAnswer(df.selectExpr("to_unix_timestamp(ts)"),
                 Seq(Row(ts1.getTime / 1000L), Row(ts2.getTime / 1000L)))
     checkAnswer(df.selectExpr("to_unix_timestamp(ss)"),
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
index a135a1a95e9..ff2826dff7a 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
@@ -213,10 +213,10 @@ class UDFSuite extends QueryTest with SharedSQLContext {
                             (data: Seq[Int], nestedData: Seq[Seq[Int]]) => {
                               (data, nestedData)
                             })
-    sqlContext.udf
-      .register("mapDataFunc", (data: scala.collection.Map[Int, String]) => {
-        data
-      })
+    sqlContext.udf.register("mapDataFunc",
+                            (data: scala.collection.Map[Int, String]) => {
+                              data
+                            })
     sqlContext.udf.register("complexDataFunc",
                             (m: Map[String, Int], a: Seq[Int], b: Boolean) => {
                               (m, a, b)
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompatibilityTest.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompatibilityTest.scala
index ad72d5a5dce..c35d9c185fc 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompatibilityTest.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompatibilityTest.scala
@@ -56,8 +56,10 @@ private[sql] abstract class ParquetCompatibilityTest
       .toSeq
       .asJava

-    val footers = ParquetFileReader
-      .readAllFootersInParallel(hadoopConfiguration, parquetFiles, true)
+    val footers = ParquetFileReader.readAllFootersInParallel(
+      hadoopConfiguration,
+      parquetFiles,
+      true)

     footers.asScala.head.getParquetMetadata.getFileMetaData.getSchema
   }
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala
index 2a22e783fb2..fc3acbd5129 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala
@@ -147,8 +147,9 @@ private[sql] trait ParquetTest extends SQLTestUtils {
     val parquetMetadata =
       new ParquetMetadata(fileMetadata, Seq.empty[BlockMetaData].asJava)
     val footer = new Footer(path, parquetMetadata)
-    ParquetFileWriter
-      .writeMetadataFile(configuration, path, Seq(footer).asJava)
+    ParquetFileWriter.writeMetadataFile(configuration,
+                                        path,
+                                        Seq(footer).asJava)
   }

   /**
@@ -167,8 +168,9 @@ private[sql] trait ParquetTest extends SQLTestUtils {
     val parquetMetadata =
       new ParquetMetadata(fileMetadata, Seq.empty[BlockMetaData].asJava)
     val footer = new Footer(path, parquetMetadata)
-    ParquetFileWriter
-      .writeMetadataFile(configuration, path, Seq(footer).asJava)
+    ParquetFileWriter.writeMetadataFile(configuration,
+                                        path,
+                                        Seq(footer).asJava)
   }

   protected def readAllFootersWithoutSummaryFiles(
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
index 676ce9093d5..843cd34b26c 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
@@ -116,8 +116,10 @@ class SQLConfEntrySuite extends SparkFunSuite {

   test("enumConf") {
     val key = "spark.sql.SQLConfEntrySuite.enum"
-    val confEntry = SQLConfEntry
-      .enumConf(key, v => v, Set("a", "b", "c"), defaultValue = Some("a"))
+    val confEntry = SQLConfEntry.enumConf(key,
+                                          v => v,
+                                          Set("a", "b", "c"),
+                                          defaultValue = Some("a"))
     assert(conf.getConf(confEntry) === "a")

     conf.setConf(confEntry, "b")
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index d58b9b54eae..e5c7d5312aa 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -832,8 +832,9 @@ class JDBCSuite
     // Regression test for bug SPARK-11788
     val timestamp = java.sql.Timestamp.valueOf("2001-02-20 11:22:33.543543");
     val date = java.sql.Date.valueOf("1995-01-01")
-    val jdbcDf = sqlContext.read
-      .jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties)
+    val jdbcDf = sqlContext.read.jdbc(urlWithUserAndPass,
+                                      "TEST.TIMETYPES",
+                                      new Properties)
     val rows = jdbcDf.where($"B" > date && $"C" > timestamp).collect()
     assert(
       rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
index 9404142a3a0..078308f88a3 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
@@ -395,8 +395,8 @@ class FilteredScanSuite
     test(s"PushDown Returns $expectedCount: $sqlString") {
       // These tests check a particular plan, disable whole stage codegen.
-      caseInsensitiveContext.conf
-        .setConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED, false)
+      caseInsensitiveContext.conf.setConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED,
+                                          false)
       try {
         val queryExecution = sql(sqlString).queryExecution
         val rawPlan = queryExecution.executedPlan.collect {
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index 2dcc7316271..52e6645f9d8 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -106,8 +106,9 @@ class InsertSuite extends DataSourceTest with SharedSQLContext {
     )

     // Writing the table to less part files.
-    val rdd1 = sparkContext
-      .parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""), 5)
+    val rdd1 = sparkContext.parallelize(
+      (1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""),
+      5)
     caseInsensitiveContext.read.json(rdd1).registerTempTable("jt1")
     sql(s"""
       |INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jt1
@@ -118,8 +119,9 @@ class InsertSuite extends DataSourceTest with SharedSQLContext {
     )

     // Writing the table to more part files.
-    val rdd2 = sparkContext
-      .parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""), 10)
+    val rdd2 = sparkContext.parallelize(
+      (1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""),
+      10)
     caseInsensitiveContext.read.json(rdd2).registerTempTable("jt2")
     sql(s"""
       |INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jt2
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
index 234347ff278..828f2c8937a 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/PrunedScanSuite.scala
@@ -120,8 +120,8 @@ class PrunedScanSuite extends DataSourceTest with SharedSQLContext {
     test(s"Columns output ${expectedColumns.mkString(",")}: $sqlString") {
       // These tests check a particular plan, disable whole stage codegen.
-      caseInsensitiveContext.conf
-        .setConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED, false)
+      caseInsensitiveContext.conf.setConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED,
+                                          false)
       try {
         val queryExecution = sql(sqlString).queryExecution
         val rawPlan = queryExecution.executedPlan.collect {
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala
index 0e0bb5486b6..fe11a00d2d9 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala
@@ -84,30 +84,30 @@ class SaveLoadSuite
   }

   test("save with path and load") {
-    caseInsensitiveContext.conf
-      .setConf(SQLConf.DEFAULT_DATA_SOURCE_NAME, "org.apache.spark.sql.json")
+    caseInsensitiveContext.conf.setConf(SQLConf.DEFAULT_DATA_SOURCE_NAME,
+                                        "org.apache.spark.sql.json")
     df.write.save(path.toString)
     checkLoad()
   }

   test("save with string mode and path, and load") {
-    caseInsensitiveContext.conf
-      .setConf(SQLConf.DEFAULT_DATA_SOURCE_NAME, "org.apache.spark.sql.json")
+    caseInsensitiveContext.conf.setConf(SQLConf.DEFAULT_DATA_SOURCE_NAME,
+                                        "org.apache.spark.sql.json")
     path.createNewFile()
     df.write.mode("overwrite").save(path.toString)
     checkLoad()
   }

   test("save with path and datasource, and load") {
-    caseInsensitiveContext.conf
-      .setConf(SQLConf.DEFAULT_DATA_SOURCE_NAME, "not a source name")
+    caseInsensitiveContext.conf.setConf(SQLConf.DEFAULT_DATA_SOURCE_NAME,
+                                        "not a source name")
     df.write.json(path.toString)
     checkLoad()
   }

   test("save with data source and options, and load") {
-    caseInsensitiveContext.conf
-      .setConf(SQLConf.DEFAULT_DATA_SOURCE_NAME, "not a source name")
+    caseInsensitiveContext.conf.setConf(SQLConf.DEFAULT_DATA_SOURCE_NAME,
+                                        "not a source name")
     df.write.mode(SaveMode.ErrorIfExists).json(path.toString)
     checkLoad()
   }
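Named and function-literal arguments keep their form under the rewrite; each argument simply moves to its own line, as in the SQLConfEntrySuite hunk above:

    // before
    val confEntry = SQLConfEntry
      .enumConf(key, v => v, Set("a", "b", "c"), defaultValue = Some("a"))

    // after: one argument per line, the named argument left intact
    val confEntry = SQLConfEntry.enumConf(key,
                                          v => v,
                                          Set("a", "b", "c"),
                                          defaultValue = Some("a"))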
diff --git a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/streaming/ContinuousQueryManagerSuite.scala b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/streaming/ContinuousQueryManagerSuite.scala
index 7ff68197443..2bc52259803 100644
--- a/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/streaming/ContinuousQueryManagerSuite.scala
+++ b/repos/spark/sql/core/src/test/scala/org/apache/spark/sql/streaming/ContinuousQueryManagerSuite.scala
@@ -280,8 +280,9 @@ class ContinuousQueryManagerSuite
       }
     }

-    AwaitTerminationTester
-      .test(expectedBehavior, awaitTermFunc, testBehaviorFor)
+    AwaitTerminationTester.test(expectedBehavior,
+                                awaitTermFunc,
+                                testBehaviorFor)
   }

   /** Stop a random active query either with `stop()` or with an error */
diff --git a/repos/spark/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala b/repos/spark/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
index ea4624da4ff..dae284412f6 100644
--- a/repos/spark/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
+++ b/repos/spark/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
@@ -103,8 +103,8 @@ object HiveThriftServer2 extends Logging {
       new HiveThriftServer2Listener(server, SparkSQLEnv.hiveContext.conf)
     SparkSQLEnv.sparkContext.addSparkListener(listener)
     uiTab =
-      if (SparkSQLEnv.sparkContext.getConf
-            .getBoolean("spark.ui.enabled", true)) {
+      if (SparkSQLEnv.sparkContext.getConf.getBoolean("spark.ui.enabled",
+                                                      true)) {
         Some(new ThriftServerTab(SparkSQLEnv.sparkContext))
       } else {
         None
diff --git a/repos/spark/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/repos/spark/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index c4be6c11fb2..57650e073e3 100644
--- a/repos/spark/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/repos/spark/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -66,8 +66,8 @@ private[hive] object SparkSQLEnv extends Logging {
       hiveContext.metadataHive.setError(
         new PrintStream(System.err, true, "UTF-8"))

-      hiveContext
-        .setConf("spark.sql.hive.version", HiveContext.hiveExecutionVersion)
+      hiveContext.setConf("spark.sql.hive.version",
+                          HiveContext.hiveExecutionVersion)

       if (log.isDebugEnabled) {
         hiveContext.hiveconf.getAllProperties.asScala.toSeq.sorted.foreach {
diff --git a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index 26614d55e6f..cfcc0dc2cb7 100644
--- a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -788,8 +788,9 @@ private[hive] trait HiveInspectors {
       val keyOI = toInspector(keyType)
       val valueOI = toInspector(valueType)
       if (value == null) {
-        ObjectInspectorFactory
-          .getStandardConstantMapObjectInspector(keyOI, valueOI, null)
+        ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI,
+                                                                     valueOI,
+                                                                     null)
       } else {
         val map = value.asInstanceOf[MapData]
         val jmap = new java.util.HashMap[Any, Any](map.numElements())
@@ -798,8 +799,9 @@ private[hive] trait HiveInspectors {
           jmap.put(wrap(k, keyOI, keyType), wrap(v, valueOI, valueType))
         })

-        ObjectInspectorFactory
-          .getStandardConstantMapObjectInspector(keyOI, valueOI, jmap)
+        ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI,
+                                                                     valueOI,
+                                                                     jmap)
       }
     // We will enumerate all of the possible constant expressions, throw exception if we missed
     case Literal(_, dt) =>
diff --git a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index bed9d10d32b..cb954c97e0e 100644
--- a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -445,8 +445,9 @@ private[hive] class HiveQl(conf: ParserConf)
             tableDesc = tableDesc.withNewStorage(
               serdeProperties = tableDesc.storage.serdeProperties ++ serdeParams.asScala)
           case Token("TOK_TABLELOCATION", child :: Nil) =>
-            val location = EximUtil
-              .relativeToAbsolutePath(hiveConf, unescapeSQLString(child.text))
+            val location =
+              EximUtil.relativeToAbsolutePath(hiveConf,
+                                              unescapeSQLString(child.text))
             tableDesc = tableDesc.withNewStorage(locationUri = Option(location))
           case Token("TOK_TABLESERIALIZER", child :: Nil) =>
diff --git a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
index db1b5981dd9..40a6dc85c72 100644
--- a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
+++ b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala
@@ -356,8 +356,9 @@ private[hive] object HadoopTableReader extends HiveInspectors with Logging {
       jobConf: JobConf) {
     FileInputFormat.setInputPaths(jobConf, Seq[Path](new Path(path)): _*)
     if (tableDesc != null) {
-      HiveTableUtil
-        .configureJobPropertiesForStorageHandler(tableDesc, jobConf, true)
+      HiveTableUtil.configureJobPropertiesForStorageHandler(tableDesc,
+                                                            jobConf,
+                                                            true)
       Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf)
     }
     val bufferSize = System.getProperty("spark.buffer.size", "65536")
diff --git a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
index 82d58011bbf..134a590638e 100644
--- a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
+++ b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScan.scala
@@ -94,8 +94,9 @@ private[hive] case class HiveTableScan(
     val neededColumnIDs =
       attributes.flatMap(relation.columnOrdinals.get).map(o => o: Integer)

-    HiveShim
-      .appendReadColumns(hiveConf, neededColumnIDs, attributes.map(_.name))
+    HiveShim.appendReadColumns(hiveConf,
+                               neededColumnIDs,
+                               attributes.map(_.name))

     val tableDesc = relation.tableDesc
     val deserializer = tableDesc.getDeserializerClass.newInstance
diff --git a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
index b7a49d655be..e7605cde4aa 100644
--- a/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
+++ b/repos/spark/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveWriterContainers.scala
@@ -66,8 +66,9 @@ private[hive] class SparkHiveWriterContainer(
   // Add table properties from storage handler to jobConf, so any custom storage
   // handler settings can be set to jobConf
   if (tableDesc != null) {
-    HiveTableUtil
-      .configureJobPropertiesForStorageHandler(tableDesc, jobConf, false)
+    HiveTableUtil.configureJobPropertiesForStorageHandler(tableDesc,
+                                                          jobConf,
+                                                          false)
     Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf)
   }
   protected val conf = new SerializableJobConf(jobConf)
@@ -104,8 +105,9 @@ private[hive] class SparkHiveWriterContainer(
     val numberFormat = NumberFormat.getInstance()
     numberFormat.setMinimumIntegerDigits(5)
     numberFormat.setGroupingUsed(false)
-    val extension = Utilities
-      .getFileExtension(conf.value, fileSinkConf.getCompressed, outputFormat)
+    val extension = Utilities.getFileExtension(conf.value,
+                                               fileSinkConf.getCompressed,
+                                               outputFormat)
     "part-" + numberFormat.format(splitID) + extension
   }

@@ -299,8 +301,9 @@ private[spark] class SparkHiveDynamicPartitionWriterContainer(
     }

     // Returns the partition path given a partition key.
-    val getPartitionString = UnsafeProjection
-      .create(Concat(partitionStringExpression) :: Nil, partitionOutput)
+    val getPartitionString = UnsafeProjection.create(
+      Concat(partitionStringExpression) :: Nil,
+      partitionOutput)

     // If anything below fails, we should abort the task.
     try {
diff --git a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
index 80ee1b1155a..c6f61b099c5 100644
--- a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
+++ b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
@@ -92,8 +92,9 @@ class HiveInspectorSuite extends SparkFunSuite with HiveInspectors {
       0.asInstanceOf[Float]) :: Literal(0.asInstanceOf[Double]) :: Literal(
       "0") :: Literal(java.sql.Date.valueOf("2014-09-23")) :: Literal(Decimal(
       BigDecimal(123.123))) :: Literal(new java.sql.Timestamp(123123)) :: Literal(
-      Array[Byte](1, 2, 3)) :: Literal
-      .create(Seq[Int](1, 2, 3), ArrayType(IntegerType)) :: Literal.create(
+      Array[Byte](1, 2, 3)) :: Literal.create(
+      Seq[Int](1, 2, 3),
+      ArrayType(IntegerType)) :: Literal.create(
       Map[Int, Int](1 -> 2, 2 -> 1),
       MapType(IntegerType, IntegerType)) :: Literal.create(
       Row(1, 2.0d, 3.0f),
diff --git a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index bf4b4a447cd..9380616bac5 100644
--- a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -727,8 +727,8 @@ class MetastoreDataSourcesSuite
             "spark.sql.sources.schema" -> schema.json,
             "EXTERNAL" -> "FALSE"))

-      sessionState.catalog.client
-        .createTable(hiveTable, ignoreIfExists = false)
+      sessionState.catalog.client.createTable(hiveTable,
+                                              ignoreIfExists = false)

       invalidateTable(tableName)
       val actualSchema = table(tableName).schema
diff --git a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
index 9683e3b96d3..59aa51e931f 100644
--- a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
+++ b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
@@ -185,12 +185,11 @@ abstract class AggregationQuerySuite
       (Seq[Integer](3), null, null)).toDF("key", "value1", "value2")
     data3.write.saveAsTable("agg3")

-    val emptyDF = sqlContext
-      .createDataFrame(sparkContext.emptyRDD[Row],
-                       StructType(
-                         StructField("key", StringType) :: StructField(
-                           "value",
-                           IntegerType) :: Nil))
+    val emptyDF = sqlContext.createDataFrame(
+      sparkContext.emptyRDD[Row],
+      StructType(StructField("key", StringType) :: StructField(
+        "value",
+        IntegerType) :: Nil))
     emptyDF.registerTempTable("emptyTable")

     // Register UDAFs
diff --git a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcFilterSuite.scala b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcFilterSuite.scala
index cf44ef6c2a8..6d073e702cd 100644
--- a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcFilterSuite.scala
+++ b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcFilterSuite.scala
@@ -56,8 +56,9 @@ class OrcFilterSuite extends QueryTest with OrcTest {
     assert(maybeAnalyzedPredicate.isDefined,
            "No filter is analyzed from the given query")

-    val (_, selectedFilters) = DataSourceStrategy
-      .selectFilters(maybeRelation.get, maybeAnalyzedPredicate.toSeq)
+    val (_, selectedFilters) = DataSourceStrategy.selectFilters(
+      maybeRelation.get,
+      maybeAnalyzedPredicate.toSeq)
     assert(selectedFilters.nonEmpty, "No filter is pushed down")

     val maybeFilter = OrcFilters.createFilter(selectedFilters.toArray)
diff --git a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
index e08ca10d1f4..290ee72a177 100644
--- a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
+++ b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
@@ -518,8 +518,9 @@ class ParquetMetastoreSuite extends ParquetPartitioningTest {
        | OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
      """.stripMargin)

-    tableIdentifier = _catalog
-      .QualifiedTableName("default", "test_parquet_partitioned_cache_test")
+    tableIdentifier =
+      _catalog.QualifiedTableName("default",
+                                  "test_parquet_partitioned_cache_test")
     assert(
       sessionState.catalog.cachedDataSourceTables
         .getIfPresent(tableIdentifier) === null)
diff --git a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/sources/hadoopFsRelationSuites.scala b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/sources/hadoopFsRelationSuites.scala
index 8f9a883c319..85ed3fd3f4e 100644
--- a/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/sources/hadoopFsRelationSuites.scala
+++ b/repos/spark/sql/hive/src/test/scala/org/apache/spark/sql/sources/hadoopFsRelationSuites.scala
@@ -571,8 +571,12 @@ abstract class HadoopFsRelationTest
     val df = (for {
       i <- 1 to 3
       p2 <- Seq("foo", "bar")
-    } yield (i, s"val_$i", 1.0d, p2, 123, 123.123f))
-      .toDF("a", "b", "p1", "p2", "p3", "f")
+    } yield (i, s"val_$i", 1.0d, p2, 123, 123.123f)).toDF("a",
+                                                          "b",
+                                                          "p1",
+                                                          "p2",
+                                                          "p3",
+                                                          "f")

     val input = df.select('a,
                           'b,
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
index 7d902acdafa..a52186bafd0 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
@@ -274,8 +274,8 @@ private[streaming] class CheckpointWriter(
             "Checkpoint for time " + checkpointTime + " saved to file '" + checkpointFile +
               "', took " + bytes.length + " bytes and " + (finishTime - startTime) + " ms")
-          jobGenerator
-            .onCheckpointCompletion(checkpointTime, clearCheckpointDataLater)
+          jobGenerator.onCheckpointCompletion(checkpointTime,
+                                              clearCheckpointDataLater)
           return
         } catch {
           case ioe: IOException =>
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
index a113fdaa4ac..6fc1ae22c33 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
@@ -102,11 +102,13 @@ class StreamingContext private[streaming] (
       sparkHome: String = null,
       jars: Seq[String] = Nil,
       environment: Map[String, String] = Map()) = {
-    this(
-      StreamingContext
-        .createNewSparkContext(master, appName, sparkHome, jars, environment),
-      null,
-      batchDuration)
+    this(StreamingContext.createNewSparkContext(master,
+                                                appName,
+                                                sparkHome,
+                                                jars,
+                                                environment),
+         null,
+         batchDuration)
   }

   /**
@@ -287,8 +289,10 @@ class StreamingContext private[streaming] (
    * Note: Return statements are NOT allowed in the given body.
    */
  private[streaming] def withNamedScope[U](name: String)(body: => U): U = {
-    RDDOperationScope
-      .withScope(sc, name, allowNesting = false, ignoreParent = false)(body)
+    RDDOperationScope.withScope(sc,
+                                name,
+                                allowNesting = false,
+                                ignoreParent = false)(body)
   }

   /**
@@ -865,8 +869,10 @@ object StreamingContext extends Logging {
       hadoopConf: Configuration = SparkHadoopUtil.get.conf,
       createOnError: Boolean = false
   ): StreamingContext = {
-    val checkpointOption = CheckpointReader
-      .read(checkpointPath, new SparkConf(), hadoopConf, createOnError)
+    val checkpointOption = CheckpointReader.read(checkpointPath,
+                                                 new SparkConf(),
+                                                 hadoopConf,
+                                                 createOnError)
     checkpointOption
       .map(new StreamingContext(null, _, null))
       .getOrElse(creatingFunc())
@@ -919,8 +925,10 @@ private class StreamingContextPythonHelper {
   */
  def tryRecoverFromCheckpoint(
      checkpointPath: String): Option[StreamingContext] = {
-    val checkpointOption = CheckpointReader
-      .read(checkpointPath, new SparkConf(), SparkHadoopUtil.get.conf, false)
+    val checkpointOption = CheckpointReader.read(checkpointPath,
+                                                 new SparkConf(),
+                                                 SparkHadoopUtil.get.conf,
+                                                 false)
     checkpointOption.map(new StreamingContext(null, _, null))
   }
 }
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingSource.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingSource.scala
index ac47f5f78b0..9e4a68efa20 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingSource.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingSource.scala
@@ -43,10 +43,11 @@ private[streaming] class StreamingSource(ssc: StreamingContext)
       name: String,
       f: StreamingJobProgressListener => Option[T],
       defaultValue: T): Unit = {
-    metricRegistry
-      .register(MetricRegistry.name("streaming", name), new Gauge[T] {
-        override def getValue: T = f(streamingListener).getOrElse(defaultValue)
-      })
+    metricRegistry.register(MetricRegistry.name("streaming", name),
+                            new Gauge[T] {
+                              override def getValue: T =
+                                f(streamingListener).getOrElse(defaultValue)
+                            })
   }

   // Gauge for number of network receivers
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
index 1681cfe18fa..06cf00ecebe 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
@@ -271,8 +271,10 @@ trait JavaDStreamLike[
       windowDuration: Duration,
       slideDuration: Duration
   ): JavaDStream[T] = {
-    dstream
-      .reduceByWindow(reduceFunc, invReduceFunc, windowDuration, slideDuration)
+    dstream.reduceByWindow(reduceFunc,
+                           invReduceFunc,
+                           windowDuration,
+                           slideDuration)
   }

   /**
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
index 98533fbaa18..99370ffe23e 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
@@ -187,8 +187,10 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
       mergeCombiners: JFunction2[C, C, C],
       partitioner: Partitioner): JavaPairDStream[K, C] = {
     implicit val cm: ClassTag[C] = fakeClassTag
-    dstream
-      .combineByKey(createCombiner, mergeValue, mergeCombiners, partitioner)
+    dstream.combineByKey(createCombiner,
+                         mergeValue,
+                         mergeCombiners,
+                         partitioner)
   }

   /**
@@ -546,8 +548,8 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
       updateFunc: JFunction2[JList[V], Optional[S], Optional[S]],
       numPartitions: Int): JavaPairDStream[K, S] = {
     implicit val cm: ClassTag[S] = fakeClassTag
-    dstream
-      .updateStateByKey(convertUpdateStateFunction(updateFunc), numPartitions)
+    dstream.updateStateByKey(convertUpdateStateFunction(updateFunc),
+                             numPartitions)
   }

   /**
@@ -565,8 +567,8 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
       partitioner: Partitioner
   ): JavaPairDStream[K, S] = {
     implicit val cm: ClassTag[S] = fakeClassTag
-    dstream
-      .updateStateByKey(convertUpdateStateFunction(updateFunc), partitioner)
+    dstream.updateStateByKey(convertUpdateStateFunction(updateFunc),
+                             partitioner)
   }

   /**
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
index 5538c3e2dd8..2fb29235d7c 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
@@ -332,8 +332,8 @@ private[python] class PythonReducedWindowedDStream(
         windowDuration >= slideDuration * 5) {

       // subtract the values from old RDDs
-      val oldRDDs = parent
-        .slice(previous.beginTime + parent.slideDuration, current.beginTime)
+      val oldRDDs = parent.slice(previous.beginTime + parent.slideDuration,
+                                 current.beginTime)
       val subtracted =
         if (oldRDDs.size > 0) {
           invReduceFunc(previousRDD, Some(ssc.sc.union(oldRDDs)), validTime)
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
index 35f4f8c8d7a..007c638fb8a 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
@@ -118,8 +118,8 @@ private[streaming] class FileInputDStream[K, V, F <: NewInputFormat[K, V]](
    * selected and processed.
    */
  private val numBatchesToRemember =
-    FileInputDStream
-      .calculateNumBatchesToRemember(slideDuration, minRememberDurationS)
+    FileInputDStream.calculateNumBatchesToRemember(slideDuration,
+                                                   minRememberDurationS)
  private val durationToRemember = slideDuration * numBatchesToRemember
  remember(durationToRemember)
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/rdd/MapWithStateRDD.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/rdd/MapWithStateRDD.scala
index 0953c79f4ef..37a28d9134d 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/rdd/MapWithStateRDD.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/rdd/MapWithStateRDD.scala
@@ -157,10 +157,12 @@ private[streaming] class MapWithStateRDD[K: ClassTag,
       context: TaskContext): Iterator[MapWithStateRDDRecord[K, S, E]] = {

     val stateRDDPartition = partition.asInstanceOf[MapWithStateRDDPartition]
-    val prevStateRDDIterator = prevStateRDD
-      .iterator(stateRDDPartition.previousSessionRDDPartition, context)
-    val dataIterator = partitionedDataRDD
-      .iterator(stateRDDPartition.partitionedDataRDDPartition, context)
+    val prevStateRDDIterator = prevStateRDD.iterator(
+      stateRDDPartition.previousSessionRDDPartition,
+      context)
+    val dataIterator = partitionedDataRDD.iterator(
+      stateRDDPartition.partitionedDataRDDPartition,
+      context)

     val prevRecord =
       if (prevStateRDDIterator.hasNext) Some(prevStateRDDIterator.next())
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
index d98cfb0de92..b0cc66676b0 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
@@ -88,8 +88,10 @@ private[streaming] class BlockManagerBasedBlockHandler(
                          tellMaster = true)
       case IteratorBlock(iterator) =>
         val countIterator = new CountingIterator(iterator)
-        val putResult = blockManager
-          .putIterator(blockId, countIterator, storageLevel, tellMaster = true)
+        val putResult = blockManager.putIterator(blockId,
+                                                 countIterator,
+                                                 storageLevel,
+                                                 tellMaster = true)
         numRecords = countIterator.count
         putResult
       case ByteBufferBlock(byteBuffer) =>
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala
index a6cdf39593c..3b2e8c84927 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala
@@ -330,8 +330,8 @@ private[streaming] class JobGenerator(jobScheduler: JobScheduler)
           ssc.checkpointDuration)) {
       logInfo("Checkpointing graph for time " + time)
       ssc.graph.updateCheckpointData(time)
-      checkpointWriter
-        .write(new Checkpoint(ssc, time), clearCheckpointDataLater)
+      checkpointWriter.write(new Checkpoint(ssc, time),
+                             clearCheckpointDataLater)
     }
   }
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobScheduler.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobScheduler.scala
index ffa370ef536..db6d7ca6ceb 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobScheduler.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobScheduler.scala
@@ -49,8 +49,9 @@ private[streaming] class JobScheduler(val ssc: StreamingContext)
     new ConcurrentHashMap[Time, JobSet]
   private val numConcurrentJobs =
     ssc.conf.getInt("spark.streaming.concurrentJobs", 1)
-  private val jobExecutor = ThreadUtils
-    .newDaemonFixedThreadPool(numConcurrentJobs, "streaming-job-executor")
+  private val jobExecutor = ThreadUtils.newDaemonFixedThreadPool(
+    numConcurrentJobs,
+    "streaming-job-executor")
   private val jobGenerator = new JobGenerator(this)
   val clock = jobGenerator.clock
   val listenerBus = new StreamingListenerBus(ssc.sparkContext.listenerBus)
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala
index 328ed11d1e2..64db5cf4eb9 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala
@@ -276,8 +276,8 @@ private[streaming] class ReceivedBlockTracker(

   /** Get the queue of received blocks belonging to a particular stream */
   private def getReceivedBlockQueue(streamId: Int): ReceivedBlockQueue = {
-    streamIdToUnallocatedBlockQueues
-      .getOrElseUpdate(streamId, new ReceivedBlockQueue)
+    streamIdToUnallocatedBlockQueues.getOrElseUpdate(streamId,
+                                                     new ReceivedBlockQueue)
   }

   /** Optionally create the write ahead log manager only if the feature is enabled */
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala
index a09d29fbad7..34a027978a9 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala
@@ -232,8 +232,8 @@ private[streaming] class ReceiverTracker(ssc: StreamingContext,
    */
  def cleanupOldBlocksAndBatches(cleanupThreshTime: Time) {
    // Clean up old block and batch metadata
-    receivedBlockTracker
-      .cleanupOldBatches(cleanupThreshTime, waitForCompletion = false)
+    receivedBlockTracker.cleanupOldBatches(cleanupThreshTime,
+                                           waitForCompletion = false)

    // Signal the receivers to delete old block data
    if (WriteAheadLogUtils.enableReceiverLog(ssc.conf)) {
@@ -315,8 +315,8 @@ private[streaming] class ReceiverTracker(ssc: StreamingContext,
                              lastErrorTime = lastErrorTime)
     val newReceiverTrackingInfo = receiverTrackingInfos.get(streamId) match {
       case Some(oldInfo) =>
-        oldInfo
-          .copy(state = ReceiverState.INACTIVE, errorInfo = Some(errorInfo))
+        oldInfo.copy(state = ReceiverState.INACTIVE,
+                     errorInfo = Some(errorInfo))
       case None =>
         logWarning("No prior receiver info")
         ReceiverTrackingInfo(streamId,
@@ -521,8 +521,9 @@ private[streaming] class ReceiverTracker(ssc: StreamingContext,
       } else {
         val oldReceiverInfo = receiverTrackingInfos(receiver.streamId)
         // Clear "scheduledLocations" to indicate we are going to do local scheduling
-        val newReceiverInfo = oldReceiverInfo
-          .copy(state = ReceiverState.INACTIVE, scheduledLocations = None)
+        val newReceiverInfo = oldReceiverInfo.copy(
+          state = ReceiverState.INACTIVE,
+          scheduledLocations = None)
         receiverTrackingInfos(receiver.streamId) = newReceiverInfo
         schedulingPolicy.rescheduleReceiver(receiver.streamId,
                                             receiver.preferredLocation,
diff --git a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingPage.scala b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingPage.scala
index 63745cda0bb..0442e42373d 100644
--- a/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingPage.scala
+++ b/repos/spark/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingPage.scala
@@ -155,8 +155,10 @@ private[ui] class StreamingPage(parent: StreamingTab)
       resources ++ basicInfo ++
         listener.synchronized { generateStatTable() ++ generateBatchListTables() }
-    SparkUIUtils
-      .headerSparkPage("Streaming Statistics", content, parent, Some(5000))
+    SparkUIUtils.headerSparkPage("Streaming Statistics",
+                                 content,
+                                 parent,
+                                 Some(5000))
   }

   /**
diff --git a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
index 53c65fc861c..747399864a5 100644
--- a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
+++ b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
@@ -52,8 +52,9 @@ class FailureSuite extends SparkFunSuite with BeforeAndAfter with Logging {
   }

   test("multiple failures with map") {
-    MasterFailureTest
-      .testMap(directory.getAbsolutePath, numBatches, batchDuration)
+    MasterFailureTest.testMap(directory.getAbsolutePath,
+                              numBatches,
+                              batchDuration)
   }

   test("multiple failures with updateStateByKey") {
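The penalty can also collapse a break entirely: when the call is the operand of an infix assertion, re-attaching the select leaves everything on one (long) line rather than splitting the argument list, as the ReceivedBlockTrackerSuite hunks below show:

    // before
    tracker
      .getBlocksOfBatchAndStream(batchTime, streamId) shouldEqual blockInfos

    // after: select re-attached, no break inside the argument list
    tracker.getBlocksOfBatchAndStream(batchTime, streamId) shouldEqual blockInfos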
receivedBlockTracker.allocateBlocksToBatch(1) - receivedBlockTracker - .getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos + receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos blockInfos.map(receivedBlockTracker.addBlock) receivedBlockTracker.allocateBlocksToBatch(2) @@ -165,8 +163,7 @@ class ReceivedBlockTrackerSuite // Allocate blocks to batch and verify whether the unallocated blocks got allocated val batchTime1 = manualClock.getTimeMillis() tracker2.allocateBlocksToBatch(batchTime1) - tracker2 - .getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1 + tracker2.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1 tracker2.getBlocksOfBatch(batchTime1) shouldEqual Map( streamId -> blockInfos1) @@ -175,8 +172,7 @@ class ReceivedBlockTrackerSuite val batchTime2 = manualClock.getTimeMillis() val blockInfos2 = addBlockInfos(tracker2) tracker2.allocateBlocksToBatch(batchTime2) - tracker2 - .getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2 + tracker2.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2 // Verify whether log has correct contents val expectedWrittenData2 = @@ -190,10 +186,8 @@ class ReceivedBlockTrackerSuite incrementTime() val tracker3 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true) - tracker3 - .getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1 - tracker3 - .getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2 + tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1 + tracker3.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2 tracker3.getUnallocatedBlocks(streamId) shouldBe empty // Cleanup first batch but not second batch @@ -202,8 +196,7 @@ class ReceivedBlockTrackerSuite tracker3.cleanupOldBatches(batchTime2, waitForCompletion = true) // Verify that the batch allocations have been cleaned, and the act has been written to log - tracker3 - .getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual Seq.empty + tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual Seq.empty getWrittenLogData(getWriteAheadLogFiles().last) should contain( createBatchCleanup(batchTime1)) @@ -219,10 +212,8 @@ class ReceivedBlockTrackerSuite val tracker4 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true) tracker4.getUnallocatedBlocks(streamId) shouldBe empty - tracker4 - .getBlocksOfBatchAndStream(batchTime1, streamId) shouldBe empty // should be cleaned - tracker4 - .getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2 + tracker4.getBlocksOfBatchAndStream(batchTime1, streamId) shouldBe empty // should be cleaned + tracker4.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2 } test("disable write ahead log when checkpoint directory is not set") { diff --git a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala index ccb941984c9..67256d8cb46 100644 --- a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala +++ b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala @@ -146,8 +146,8 @@ class ReceiverInputDStreamSuite extends TestSuiteBase with BeforeAndAfterAll { body: ReceiverInputDStream[_] => Unit): Unit = { val conf = new SparkConf() 
     conf.setMaster("local[4]").setAppName("ReceiverInputDStreamSuite")
-    conf
-      .set(WriteAheadLogUtils.RECEIVER_WAL_ENABLE_CONF_KEY, enableWAL.toString)
+    conf.set(WriteAheadLogUtils.RECEIVER_WAL_ENABLE_CONF_KEY,
+             enableWAL.toString)
     require(WriteAheadLogUtils.enableReceiverLog(conf) === enableWAL)
     val ssc = new StreamingContext(conf, Seconds(1))
     val receiverStream = new ReceiverInputDStream[Int](ssc) {
@@ -169,8 +169,10 @@ class ReceiverInputDStreamSuite extends TestSuiteBase with BeforeAndAfterAll {
       createBlock: Boolean = true): ReceivedBlockInfo = {
     val blockId = new StreamBlockId(0, Random.nextLong())
     if (createBlock) {
-      SparkEnv.get.blockManager
-        .putSingle(blockId, 1, StorageLevel.MEMORY_ONLY, tellMaster = true)
+      SparkEnv.get.blockManager.putSingle(blockId,
+                                          1,
+                                          StorageLevel.MEMORY_ONLY,
+                                          tellMaster = true)
       require(SparkEnv.get.blockManager.master.contains(blockId))
     }
     val storeResult =
diff --git a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/rdd/MapWithStateRDDSuite.scala b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/rdd/MapWithStateRDDSuite.scala
index 38feb149f64..9932d4ec67e 100644
--- a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/rdd/MapWithStateRDDSuite.scala
+++ b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/rdd/MapWithStateRDDSuite.scala
@@ -399,8 +399,9 @@ class MapWithStateRDDSuite
   /** Generate MapWithStateRDD with data RDD having a long lineage */
   def makeStateRDDWithLongLineageDataRDD(
       longLineageRDD: RDD[Int]): MapWithStateRDD[Int, Int, Int, Int] = {
-    MapWithStateRDD
-      .createFromPairRDD(longLineageRDD.map { _ -> 1 }, partitioner, Time(0))
+    MapWithStateRDD.createFromPairRDD(longLineageRDD.map { _ -> 1 },
+                                      partitioner,
+                                      Time(0))
   }
 
   testRDD(makeStateRDDWithLongLineageDataRDD,
diff --git a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ReceiverTrackerSuite.scala b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ReceiverTrackerSuite.scala
index a56cd0743f2..4040b316546 100644
--- a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ReceiverTrackerSuite.scala
+++ b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ReceiverTrackerSuite.scala
@@ -93,8 +93,8 @@ class ReceiverTrackerSuite extends TestSuiteBase {
       "SPARK-11063: TaskSetManager should use Receiver RDD's preferredLocations") {
     // Use ManualClock to prevent from starting batches so that we can make sure the only task is
     // for starting the Receiver
-    val _conf = conf.clone
-      .set("spark.streaming.clock", "org.apache.spark.util.ManualClock")
+    val _conf = conf.clone.set("spark.streaming.clock",
+                               "org.apache.spark.util.ManualClock")
     withStreamingContext(new StreamingContext(_conf, Milliseconds(100))) {
       ssc =>
         @volatile var receiverTaskLocality: TaskLocality = null
diff --git a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/ui/StreamingJobProgressListenerSuite.scala b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/ui/StreamingJobProgressListenerSuite.scala
index d48705acf67..c7e5e4de3b2 100644
--- a/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/ui/StreamingJobProgressListenerSuite.scala
+++ b/repos/spark/streaming/src/test/scala/org/apache/spark/streaming/ui/StreamingJobProgressListenerSuite.scala
@@ -46,8 +46,8 @@ class StreamingJobProgressListenerSuite extends TestSuiteBase with Matchers {
     val properties = new Properties()
     properties.setProperty(JobScheduler.BATCH_TIME_PROPERTY_KEY,
                            batchTime.milliseconds.toString)
-    properties
-      .setProperty(JobScheduler.OUTPUT_OP_ID_PROPERTY_KEY, outputOpId.toString)
+    properties.setProperty(JobScheduler.OUTPUT_OP_ID_PROPERTY_KEY,
+                           outputOpId.toString)
     SparkListenerJobStart(jobId = jobId,
                           0L, // unused
                           Nil, // unused
diff --git a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala
index 6d340dde7f9..be360142e26 100644
--- a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala
+++ b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala
@@ -95,8 +95,9 @@ private[yarn] class AMDelegationTokenRenewer(sparkConf: SparkConf,
       runnable.run()
     } else {
       logInfo(s"Scheduling login from keytab in $renewalInterval millis.")
-      delegationTokenRenewer
-        .schedule(runnable, renewalInterval, TimeUnit.MILLISECONDS)
+      delegationTokenRenewer.schedule(runnable,
+                                      renewalInterval,
+                                      TimeUnit.MILLISECONDS)
     }
   }
 
@@ -183,8 +184,9 @@ private[yarn] class AMDelegationTokenRenewer(sparkConf: SparkConf,
       override def run(): Void = {
         val nns = YarnSparkHadoopUtil.get.getNameNodesToAccess(sparkConf) + dst
         hadoopUtil.obtainTokensForNamenodes(nns, freshHadoopConf, tempCreds)
-        hadoopUtil
-          .obtainTokenForHiveMetastore(sparkConf, freshHadoopConf, tempCreds)
+        hadoopUtil.obtainTokenForHiveMetastore(sparkConf,
+                                               freshHadoopConf,
+                                               tempCreds)
         hadoopUtil.obtainTokenForHBase(sparkConf, freshHadoopConf, tempCreds)
         null
       }
diff --git a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 165944e4202..941f2af5239 100644
--- a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -373,16 +373,19 @@ private[spark] class Client(val args: ClientArguments,
     val fs = FileSystem.get(hadoopConf)
     val dst = new Path(fs.getHomeDirectory(), appStagingDir)
     val nns = YarnSparkHadoopUtil.get.getNameNodesToAccess(sparkConf) + dst
-    YarnSparkHadoopUtil.get
-      .obtainTokensForNamenodes(nns, hadoopConf, credentials)
+    YarnSparkHadoopUtil.get.obtainTokensForNamenodes(nns,
+                                                     hadoopConf,
+                                                     credentials)
 
     // Used to keep track of URIs added to the distributed cache. If the same URI is added
     // multiple times, YARN will fail to launch containers for the app with an internal
     // error.
     val distributedUris = new HashSet[String]
-    YarnSparkHadoopUtil.get
-      .obtainTokenForHiveMetastore(sparkConf, hadoopConf, credentials)
-    YarnSparkHadoopUtil.get
-      .obtainTokenForHBase(sparkConf, hadoopConf, credentials)
+    YarnSparkHadoopUtil.get.obtainTokenForHiveMetastore(sparkConf,
+                                                        hadoopConf,
+                                                        credentials)
+    YarnSparkHadoopUtil.get.obtainTokenForHBase(sparkConf,
+                                                hadoopConf,
+                                                credentials)
 
     val replication = sparkConf
       .get(STAGING_FILE_REPLICATION)
@@ -1287,8 +1290,9 @@ object Client extends Logging {
     val classPathElementsToAdd = getYarnAppClasspath(conf) ++
       getMRAppClasspath(conf)
     for (c <- classPathElementsToAdd.flatten) {
-      YarnSparkHadoopUtil
-        .addPathToEnvironment(env, Environment.CLASSPATH.name, c.trim)
+      YarnSparkHadoopUtil.addPathToEnvironment(env,
+                                               Environment.CLASSPATH.name,
+                                               c.trim)
     }
   }
 
@@ -1488,8 +1492,9 @@ object Client extends Logging {
     */
   private def addClasspathEntry(path: String,
                                 env: HashMap[String, String]): Unit =
-    YarnSparkHadoopUtil
-      .addPathToEnvironment(env, Environment.CLASSPATH.name, path)
+    YarnSparkHadoopUtil.addPathToEnvironment(env,
+                                             Environment.CLASSPATH.name,
+                                             path)
 
   /**
    * Returns the path to be sent to the NM for a path that is valid on the gateway.
diff --git a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
index 1df686ac891..8bdae378a0d 100644
--- a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
+++ b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
@@ -91,8 +91,9 @@ private[spark] class ClientArguments(args: Array[String], sparkConf: SparkConf)
       .orNull
     // If dynamic allocation is enabled, start at the configured initial number of executors.
    // Default to minExecutors if no initialExecutors is set.
-    numExecutors = YarnSparkHadoopUtil
-      .getInitialTargetExecutorNumber(sparkConf, numExecutors)
+    numExecutors = YarnSparkHadoopUtil.getInitialTargetExecutorNumber(
+      sparkConf,
+      numExecutors)
     principal = Option(principal).orElse(sparkConf.get(PRINCIPAL)).orNull
     keytab = Option(keytab).orElse(sparkConf.get(KEYTAB)).orNull
   }
diff --git a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorDelegationTokenUpdater.scala b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorDelegationTokenUpdater.scala
index b3be0a7eb35..b3fec773bc7 100644
--- a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorDelegationTokenUpdater.scala
+++ b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorDelegationTokenUpdater.scala
@@ -78,8 +78,9 @@ private[spark] class ExecutorDelegationTokenUpdater(sparkConf: SparkConf,
           logInfo(
             "Updated delegation tokens were expected, but the driver has not updated the " +
               "tokens yet, will check again in an hour.")
-          delegationTokenRenewer
-            .schedule(executorUpdaterRunnable, 1, TimeUnit.HOURS)
+          delegationTokenRenewer.schedule(executorUpdaterRunnable,
+                                          1,
+                                          TimeUnit.HOURS)
           return
         }
       }
@@ -91,8 +92,9 @@ private[spark] class ExecutorDelegationTokenUpdater(sparkConf: SparkConf,
         // We just checked for new credentials but none were there, wait a minute and retry.
         // This handles the shutdown case where the staging directory may have been removed(see
         // SPARK-12316 for more details).
-        delegationTokenRenewer
-          .schedule(executorUpdaterRunnable, 1, TimeUnit.MINUTES)
+        delegationTokenRenewer.schedule(executorUpdaterRunnable,
+                                        1,
+                                        TimeUnit.MINUTES)
       } else {
         logInfo(
           s"Scheduling token refresh from HDFS in $timeFromNowToRenewal millis.")
@@ -107,8 +109,9 @@ private[spark] class ExecutorDelegationTokenUpdater(sparkConf: SparkConf,
         logWarning(
           "Error while trying to update credentials, will try again in 1 hour",
           e)
-        delegationTokenRenewer
-          .schedule(executorUpdaterRunnable, 1, TimeUnit.HOURS)
+        delegationTokenRenewer.schedule(executorUpdaterRunnable,
+                                        1,
+                                        TimeUnit.HOURS)
     }
   }
diff --git a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
index 62c14b95675..c410b489672 100644
--- a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
+++ b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
@@ -501,8 +501,9 @@ private[yarn] class YarnAllocator(driverUrl: String,
     executorIdToContainer(executorId) = container
     containerIdToExecutorId(container.getId) = executorId
 
-    val containerSet = allocatedHostToContainersMap
-      .getOrElseUpdate(executorHostname, new HashSet[ContainerId])
+    val containerSet = allocatedHostToContainersMap.getOrElseUpdate(
+      executorHostname,
+      new HashSet[ContainerId])
 
     containerSet += containerId
     allocatedContainerToHostMap.put(containerId, executorHostname)
diff --git a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnRMClient.scala b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnRMClient.scala
index c172cdf93be..46e4c171cc6 100644
--- a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnRMClient.scala
+++ b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnRMClient.scala
@@ -93,8 +93,9 @@ private[spark] class YarnRMClient(args: ApplicationMasterArguments)
       diagnostics: String = ""): Unit = synchronized {
     if (registered) {
-      amClient
-        .unregisterApplicationMaster(status, diagnostics, uiHistoryAddress)
+      amClient.unregisterApplicationMaster(status,
+                                           diagnostics,
+                                           uiHistoryAddress)
     }
   }
diff --git a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
index fbdcb34ddbe..59da6c0f384 100644
--- a/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
+++ b/repos/spark/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
@@ -227,8 +227,8 @@ class YarnSparkHadoopUtil extends SparkHadoopUtil {
       mirror.classLoader.loadClass("org.apache.hadoop.hive.conf.HiveConf")
     // using the (Configuration, Class) constructor allows the current configuration to be included
    // in the hive config.
-    val ctor = hiveConfClass
-      .getDeclaredConstructor(classOf[Configuration], classOf[Object].getClass)
+    val ctor = hiveConfClass.getDeclaredConstructor(classOf[Configuration],
+                                                    classOf[Object].getClass)
     val hiveConf =
       ctor.newInstance(conf, hiveConfClass).asInstanceOf[Configuration]
     val metastoreUri = hiveConf.getTrimmed("hive.metastore.uris", "")
@@ -247,8 +247,9 @@ class YarnSparkHadoopUtil extends SparkHadoopUtil {
     val closeCurrent = hiveClass.getMethod("closeCurrent")
 
     try {
       // get all the instance methods before invoking any
-      val getDelegationToken = hiveClass
-        .getMethod("getDelegationToken", classOf[String], classOf[String])
+      val getDelegationToken = hiveClass.getMethod("getDelegationToken",
+                                                   classOf[String],
+                                                   classOf[String])
       val getHive = hiveClass.getMethod("get", hiveConfClass)
 
       doAsRealUser {
diff --git a/repos/spark/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala b/repos/spark/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala
index b9c6c0085ca..8dff30bcce0 100644
--- a/repos/spark/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala
+++ b/repos/spark/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala
@@ -47,8 +47,9 @@ private[spark] abstract class YarnSchedulerBackend(
 
   private val yarnSchedulerEndpoint = new YarnSchedulerEndpoint(rpcEnv)
-  private val yarnSchedulerEndpointRef = rpcEnv
-    .setupEndpoint(YarnSchedulerBackend.ENDPOINT_NAME, yarnSchedulerEndpoint)
+  private val yarnSchedulerEndpointRef = rpcEnv.setupEndpoint(
+    YarnSchedulerBackend.ENDPOINT_NAME,
+    yarnSchedulerEndpoint)
 
   private implicit val askTimeout = RpcUtils.askRpcTimeout(sc.conf)
diff --git a/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala b/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala
index af6b63245f7..3d97cb0f09f 100644
--- a/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala
+++ b/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala
@@ -226,8 +226,8 @@ abstract class BaseYarnClusterSuite
     props.put("spark.executor.extraClassPath", testClasspath)
 
     // SPARK-4267: make sure java options are propagated correctly.
-    props
-      .setProperty("spark.driver.extraJavaOptions", "-Dfoo=\"one two three\"")
+    props.setProperty("spark.driver.extraJavaOptions",
+                      "-Dfoo=\"one two three\"")
     props.setProperty("spark.executor.extraJavaOptions",
                       "-Dfoo=\"one two three\"")
diff --git a/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala b/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
index 9f3684abea1..e552898196d 100644
--- a/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
+++ b/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
@@ -265,8 +265,10 @@ class YarnAllocatorSuite
     }
 
     val statuses = Seq(container1, container2).map { c =>
-      ContainerStatus
-        .newInstance(c.getId(), ContainerState.COMPLETE, "Finished", 0)
+      ContainerStatus.newInstance(c.getId(),
+                                  ContainerState.COMPLETE,
+                                  "Finished",
+                                  0)
     }
     handler.updateResourceRequests()
     handler.processCompletedContainers(statuses.toSeq)
@@ -287,8 +289,10 @@ class YarnAllocatorSuite
     handler.requestTotalExecutorsWithPreferredLocalities(2, 0, Map())
 
     val statuses = Seq(container1, container2).map { c =>
-      ContainerStatus
-        .newInstance(c.getId(), ContainerState.COMPLETE, "Failed", -1)
+      ContainerStatus.newInstance(c.getId(),
+                                  ContainerState.COMPLETE,
+                                  "Failed",
+                                  -1)
     }
     handler.updateResourceRequests()
     handler.processCompletedContainers(statuses.toSeq)
diff --git a/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 97dfc963f90..71b2e48e05b 100644
--- a/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/repos/spark/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -197,8 +197,9 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
     // Create a jar file that contains a different version of "test.resource".
     val originalJar =
       TestUtils.createJarWithFiles(Map("test.resource" -> "ORIGINAL"), tempDir)
-    val userJar = TestUtils
-      .createJarWithFiles(Map("test.resource" -> "OVERRIDDEN"), tempDir)
+    val userJar = TestUtils.createJarWithFiles(
+      Map("test.resource" -> "OVERRIDDEN"),
+      tempDir)
     val driverResult = File.createTempFile("driver", null, tempDir)
     val executorResult = File.createTempFile("executor", null, tempDir)
     val finalState = runSpark(
diff --git a/repos/spark/yarn/src/test/scala/org/apache/spark/launcher/TestClasspathBuilder.scala b/repos/spark/yarn/src/test/scala/org/apache/spark/launcher/TestClasspathBuilder.scala
index 16a331580a3..2c2298bc549 100644
--- a/repos/spark/yarn/src/test/scala/org/apache/spark/launcher/TestClasspathBuilder.scala
+++ b/repos/spark/yarn/src/test/scala/org/apache/spark/launcher/TestClasspathBuilder.scala
@@ -25,8 +25,8 @@ import java.util.{List => JList, Map => JMap}
   */
 private[spark] class TestClasspathBuilder extends AbstractCommandBuilder {
 
-  childEnv
-    .put(CommandBuilderUtils.ENV_SPARK_HOME, sys.props("spark.test.home"))
+  childEnv.put(CommandBuilderUtils.ENV_SPARK_HOME,
+               sys.props("spark.test.home"))
 
   override def buildClassPath(extraCp: String): JList[String] =
     super.buildClassPath(extraCp)
diff --git a/repos/spark/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala b/repos/spark/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala
index 5a17741064f..965d10f87f7 100644
--- a/repos/spark/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala
+++ b/repos/spark/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala
@@ -105,11 +105,9 @@ class YarnShuffleServiceSuite
     blockResolver.registerExecutor(app1Id.toString, "exec-1", shuffleInfo1)
     blockResolver.registerExecutor(app2Id.toString, "exec-2", shuffleInfo2)
-    ShuffleTestAccessor
-      .getExecutorInfo(app1Id, "exec-1", blockResolver) should be(
+    ShuffleTestAccessor.getExecutorInfo(app1Id, "exec-1", blockResolver) should be(
       Some(shuffleInfo1))
-    ShuffleTestAccessor
-      .getExecutorInfo(app2Id, "exec-2", blockResolver) should be(
+    ShuffleTestAccessor.getExecutorInfo(app2Id, "exec-2", blockResolver) should be(
       Some(shuffleInfo2))
 
     if (!execStateFile.exists()) {
diff --git a/repos/spire/core/shared/src/main/scala/spire/math/poly/Term.scala b/repos/spire/core/shared/src/main/scala/spire/math/poly/Term.scala
index 6defe6e61e8..dca24928b11 100644
--- a/repos/spire/core/shared/src/main/scala/spire/math/poly/Term.scala
+++ b/repos/spire/core/shared/src/main/scala/spire/math/poly/Term.scala
@@ -107,8 +107,8 @@ object Term {
     "[\\u2070\\u2071\\u2072\\u2073\\u2074\\u2075\\u2076\\u2077\\u2078\\u2079\\u207B\\u00B9\\u00B2\\u00B3]+")
 
   private[spire] def removeSuperscript(text: String): String =
-    superscriptRegex
-      .replaceAllIn(text, "^" + _.group(0).map(removeSuperscript))
+    superscriptRegex.replaceAllIn(text,
+                                  "^" + _.group(0).map(removeSuperscript))
 
   private val superscript: (Char => Char) = Map(digitToSuperscript: _*)
diff --git a/repos/spire/examples/src/main/scala/spire/example/randomforest.scala b/repos/spire/examples/src/main/scala/spire/example/randomforest.scala
index d9415b284be..2040de6b096 100644
--- a/repos/spire/examples/src/main/scala/spire/example/randomforest.scala
+++ b/repos/spire/examples/src/main/scala/spire/example/randomforest.scala
@@ -355,8 +355,8 @@ class RandomForestClassification[V, @sp(Double) F, K](
   }
 
   protected def defaultOptions(size: Int): FixedOptions = {
-    val axes = math
-      .max(math.sqrt(V.dimensions.toDouble).toInt, math.min(V.dimensions, 2))
+    val axes = math.max(math.sqrt(V.dimensions.toDouble).toInt,
+                        math.min(V.dimensions, 2))
     val sampleSize = math.max(size * 2 / 3, 1)
     FixedOptions(axes, sampleSize, size, 5, true)
   }
diff --git a/repos/spire/tests/src/test/scala/spire/SyntaxTest.scala b/repos/spire/tests/src/test/scala/spire/SyntaxTest.scala
index c0808dff8c9..3786b38ac17 100644
--- a/repos/spire/tests/src/test/scala/spire/SyntaxTest.scala
+++ b/repos/spire/tests/src/test/scala/spire/SyntaxTest.scala
@@ -401,8 +401,7 @@ trait BaseSyntaxTest {
     //((0.5 *: v) == V.timesl(A.fromDouble(0.5), v)) &&
     //((v :* 0.5) == V.timesr(v, A.fromDouble(0.5))) &&
     //((v :/ 2) == V.divr(v, A.fromInt(2))) &&
-    ((v dot w) == V.dot(v, w)) && ((v ⋅ w) == V
-      .dot(v, w)) && (v._x == V._x(v)) &&
+    ((v dot w) == V.dot(v, w)) && ((v ⋅ w) == V.dot(v, w)) && (v._x == V._x(v)) &&
     (v._y == V._y(v)) && (v._z == V._z(v)) &&
     (v.coord(0) == V.coord(v, 0)) && (v.coord(1) == V.coord(v, 1))
   }
diff --git a/repos/spire/tests/src/test/scala/spire/math/extras/interval/IntervalSeqArbitrary.scala b/repos/spire/tests/src/test/scala/spire/math/extras/interval/IntervalSeqArbitrary.scala
index 15e09d0489f..9af11d4a252 100644
--- a/repos/spire/tests/src/test/scala/spire/math/extras/interval/IntervalSeqArbitrary.scala
+++ b/repos/spire/tests/src/test/scala/spire/math/extras/interval/IntervalSeqArbitrary.scala
@@ -26,8 +26,8 @@ object IntervalSeqArbitrary {
                           count: Int): Gen[IntervalSeq[Int]] = {
     for {
       initial <- Gen.oneOf(true, false)
-      edges <- Gen
-        .resize(count, Gen.containerOf[Array, Int](Gen.choose(min, max)))
+      edges <- Gen.resize(count,
+                          Gen.containerOf[Array, Int](Gen.choose(min, max)))
       support = edges.sorted.distinct
       kind <- Gen.containerOfN[Array, Int](support.length, Gen.oneOf(0, 1, 2))
     } yield makeProfileXor(initial, support, kind)
diff --git a/repos/spire/tests/src/test/scala/spire/math/extras/interval/IntervalTrieArbitrary.scala b/repos/spire/tests/src/test/scala/spire/math/extras/interval/IntervalTrieArbitrary.scala
index 24017b4e79f..e17edbe02a4 100644
--- a/repos/spire/tests/src/test/scala/spire/math/extras/interval/IntervalTrieArbitrary.scala
+++ b/repos/spire/tests/src/test/scala/spire/math/extras/interval/IntervalTrieArbitrary.scala
@@ -20,8 +20,8 @@ object IntervalTrieArbitrary {
                           count: Int): Gen[IntervalTrie[Long]] = {
     for {
       initial <- Gen.oneOf(true, false)
-      edges <- Gen
-        .resize(count, Gen.containerOf[Array, Long](Gen.choose(min, max)))
+      edges <- Gen.resize(count,
+                          Gen.containerOf[Array, Long](Gen.choose(min, max)))
       support = edges.sorted.distinct
       kind <- Gen.containerOfN[Array, Int](support.length, Gen.oneOf(0, 1, 2))
     } yield makeProfileXor(initial, support, kind)
diff --git a/repos/summingbird/summingbird-batch/src/test/scala/com/twitter/summingbird/batch/BatchLaws.scala b/repos/summingbird/summingbird-batch/src/test/scala/com/twitter/summingbird/batch/BatchLaws.scala
index 5bc1e8f7da0..1480ab677e5 100644
--- a/repos/summingbird/summingbird-batch/src/test/scala/com/twitter/summingbird/batch/BatchLaws.scala
+++ b/repos/summingbird/summingbird-batch/src/test/scala/com/twitter/summingbird/batch/BatchLaws.scala
@@ -35,8 +35,8 @@ object BatchLaws extends Properties("BatchID") {
   }
 
   property("BatchID should respect ordering") = forAll { (a: Long, b: Long) =>
-    a.compare(b) == implicitly[Ordering[BatchID]]
-      .compare(BatchID(a), BatchID(b))
+    a.compare(b) == implicitly[Ordering[BatchID]].compare(BatchID(a),
+                                                          BatchID(b))
   }
 
   property("BatchID should respect addition and subtraction") = forAll {
diff --git a/repos/summingbird/summingbird-core/src/test/scala/com/twitter/summingbird/graph/LiteralTests.scala b/repos/summingbird/summingbird-core/src/test/scala/com/twitter/summingbird/graph/LiteralTests.scala
index 385e6039003..647af932b4a 100644
--- a/repos/summingbird/summingbird-core/src/test/scala/com/twitter/summingbird/graph/LiteralTests.scala
+++ b/repos/summingbird/summingbird-core/src/test/scala/com/twitter/summingbird/graph/LiteralTests.scala
@@ -52,8 +52,8 @@ object LiteralTests extends Properties("Literal") {
                  Box[Int]) => Box[Int]
       left <- genLiteral
       // We have to make dags, so select from the closure of left sometimes
-      right <- Gen
-        .oneOf(genLiteral, genChooseFrom(transitiveClosure[Box](left)))
+      right <- Gen.oneOf(genLiteral,
+                         genChooseFrom(transitiveClosure[Box](left)))
     } yield BinaryLit(left, right, bfn)
 
   def genChooseFrom[N[_]](s: Set[Literal[_, N]]): Gen[Literal[Int, N]] =
diff --git a/repos/summingbird/summingbird-example/src/main/scala/com/twitter/summingbird/example/Serialization.scala b/repos/summingbird/summingbird-example/src/main/scala/com/twitter/summingbird/example/Serialization.scala
index f6474356f47..584997a5a78 100644
--- a/repos/summingbird/summingbird-example/src/main/scala/com/twitter/summingbird/example/Serialization.scala
+++ b/repos/summingbird/summingbird-example/src/main/scala/com/twitter/summingbird/example/Serialization.scala
@@ -39,8 +39,9 @@ object Serialization {
     * This Injection converts the twitter4j.Status objects that Storm
     * and Scalding will process into Strings.
     */
-  implicit val statusCodec: Injection[Status, String] = Injection
-    .buildCatchInvert[Status, String](DataObjectFactory.getRawJSON(_))(
+  implicit val statusCodec: Injection[Status, String] =
+    Injection.buildCatchInvert[Status, String](
+      DataObjectFactory.getRawJSON(_))(
       json => DataObjectFactory.createStatus(json)
     )
diff --git a/repos/summingbird/summingbird-online/src/main/scala/com/twitter/summingbird/online/executor/AsyncBase.scala b/repos/summingbird/summingbird-online/src/main/scala/com/twitter/summingbird/online/executor/AsyncBase.scala
index ade3be37b2f..bbd57388f8f 100644
--- a/repos/summingbird/summingbird-online/src/main/scala/com/twitter/summingbird/online/executor/AsyncBase.scala
+++ b/repos/summingbird/summingbird-online/src/main/scala/com/twitter/summingbird/online/executor/AsyncBase.scala
@@ -118,8 +118,8 @@ abstract class AsyncBase[I, O, S, D, RC](maxWaitingFutures: MaxWaitingFutures,
       Await.ready(Future.collect(toForce), maxWaitingTime.get)
     } catch {
       case te: TimeoutException =>
-        logger
-          .error("forceExtra failed on %d Futures".format(toForce.size), te)
+        logger.error("forceExtra failed on %d Futures".format(toForce.size),
+                     te)
     }
   }
 }
diff --git a/repos/summingbird/summingbird-online/src/main/scala/com/twitter/summingbird/online/executor/InputState.scala b/repos/summingbird/summingbird-online/src/main/scala/com/twitter/summingbird/online/executor/InputState.scala
index adc32def872..ab05a002596 100644
--- a/repos/summingbird/summingbird-online/src/main/scala/com/twitter/summingbird/online/executor/InputState.scala
+++ b/repos/summingbird/summingbird-online/src/main/scala/com/twitter/summingbird/online/executor/InputState.scala
@@ -112,7 +112,8 @@ case class InputState[T](state: T) {
 
   override def toString: String = {
     val curState = stateTracking.get
-    "Input State Wrapper(count: %d, failed: %s)"
-      .format(curState.counter, curState.failed.toString)
+    "Input State Wrapper(count: %d, failed: %s)".format(
+      curState.counter,
+      curState.failed.toString)
   }
 }
diff --git a/repos/summingbird/summingbird-scalding-test/src/test/scala/com/twitter/summingbird/scalding/BatchedStoreProperties.scala b/repos/summingbird/summingbird-scalding-test/src/test/scala/com/twitter/summingbird/scalding/BatchedStoreProperties.scala
index 62d6953b690..6daddf95a06 100644
--- a/repos/summingbird/summingbird-scalding-test/src/test/scala/com/twitter/summingbird/scalding/BatchedStoreProperties.scala
+++ b/repos/summingbird/summingbird-scalding-test/src/test/scala/com/twitter/summingbird/scalding/BatchedStoreProperties.scala
@@ -176,8 +176,8 @@ object BatchedStoreProperties extends Properties("BatchedStore's Properties") {
                _)) = mergeResult
       val requestedEndingTimestamp: Timestamp = interval.upper.upper
       val readIntervalEndingTimestamp: Timestamp = readIntervalUpper
-      implicitly[Ordering[Timestamp]]
-        .lteq(readIntervalEndingTimestamp, requestedEndingTimestamp)
+      implicitly[Ordering[Timestamp]].lteq(readIntervalEndingTimestamp,
+                                           requestedEndingTimestamp)
     }
   }
 }
diff --git a/repos/summingbird/summingbird-scalding/src/main/scala/com/twitter/summingbird/scalding/Service.scala b/repos/summingbird/summingbird-scalding/src/main/scala/com/twitter/summingbird/scalding/Service.scala
index 097cd3d3c9a..7f6b2b83d9a 100644
--- a/repos/summingbird/summingbird-scalding/src/main/scala/com/twitter/summingbird/scalding/Service.scala
+++ b/repos/summingbird/summingbird-scalding/src/main/scala/com/twitter/summingbird/scalding/Service.scala
@@ -146,8 +146,9 @@ private[scalding] object InternalService {
       (flowMode: (FlowDef, Mode)) =>
         val left = input(flowMode)
         val right = toJoin(flowMode)
-        LookupJoin
-          .rightSumming(left, right, reducers)(implicitly, implicitly, sg)
+        LookupJoin.rightSumming(left, right, reducers)(implicitly,
+                                                       implicitly,
+                                                       sg)
     }
 
   /**
diff --git a/repos/summingbird/summingbird-scalding/src/main/scala/com/twitter/summingbird/scalding/store/InitialBatchedStore.scala b/repos/summingbird/summingbird-scalding/src/main/scala/com/twitter/summingbird/scalding/store/InitialBatchedStore.scala
index 915d1e461d0..fc40de8a4cd 100644
--- a/repos/summingbird/summingbird-scalding/src/main/scala/com/twitter/summingbird/scalding/store/InitialBatchedStore.scala
+++ b/repos/summingbird/summingbird-scalding/src/main/scala/com/twitter/summingbird/scalding/store/InitialBatchedStore.scala
@@ -56,6 +56,7 @@ class InitialBatchedStore[K, V](val firstNonZero: BatchID,
   }
 
   override def toString =
-    "InitialBatchedStore(firstNonZero=%s, proxyingFor=%s)"
-      .format(firstNonZero.toString, proxy.toString)
+    "InitialBatchedStore(firstNonZero=%s, proxyingFor=%s)".format(
+      firstNonZero.toString,
+      proxy.toString)
 }
diff --git a/repos/summingbird/summingbird-storm/src/main/scala/com/twitter/summingbird/storm/BaseBolt.scala b/repos/summingbird/summingbird-storm/src/main/scala/com/twitter/summingbird/storm/BaseBolt.scala
index 05577e084cd..c73322b9e5c 100644
--- a/repos/summingbird/summingbird-storm/src/main/scala/com/twitter/summingbird/storm/BaseBolt.scala
+++ b/repos/summingbird/summingbird-storm/src/main/scala/com/twitter/summingbird/storm/BaseBolt.scala
@@ -209,8 +209,8 @@ case class BaseBolt[I, O](jobID: JobId,
     executor.init(context)
     StormStatProvider.registerMetrics(jobID, context, countersForBolt)
     SummingbirdRuntimeStats.addPlatformStatProvider(StormStatProvider)
-    logger
-      .debug("In Bolt prepare: added jobID stat provider for jobID {}", jobID)
+    logger.debug("In Bolt prepare: added jobID stat provider for jobID {}",
+                 jobID)
   }
 
   override def declareOutputFields(declarer: OutputFieldsDeclarer) {
diff --git a/repos/summingbird/summingbird-storm/src/main/scala/com/twitter/summingbird/storm/StormStatProvider.scala b/repos/summingbird/summingbird-storm/src/main/scala/com/twitter/summingbird/storm/StormStatProvider.scala
index f8f4908369a..37d1c72bdb6 100644
--- a/repos/summingbird/summingbird-storm/src/main/scala/com/twitter/summingbird/storm/StormStatProvider.scala
+++ b/repos/summingbird/summingbird-storm/src/main/scala/com/twitter/summingbird/storm/StormStatProvider.scala
@@ -35,8 +35,8 @@ private[summingbird] object StormStatProvider extends PlatformStatProvider {
       context: TopologyContext,
       metrics: Seq[(Group, Name)]) {
-    metricsForJob
-      .putIfAbsent(jobID, new ConcurrentHashMap[String, CountMetric])
+    metricsForJob.putIfAbsent(jobID,
+                              new ConcurrentHashMap[String, CountMetric])
     val jobMap = metricsForJob.get(jobID)
 
     metrics.foreach {
diff --git a/repos/util/util-core/src/main/scala/com/twitter/util/Disposable.scala b/repos/util/util-core/src/main/scala/com/twitter/util/Disposable.scala
index edc145d76e2..67409e542c9 100644
--- a/repos/util/util-core/src/main/scala/com/twitter/util/Disposable.scala
+++ b/repos/util/util-core/src/main/scala/com/twitter/util/Disposable.scala
@@ -139,6 +139,7 @@ object Managed {
 class DoubleTrouble(cause1: Throwable, cause2: Throwable) extends Exception {
   override def getStackTrace = cause1.getStackTrace
   override def getMessage =
-    "Double failure while disposing composite resource: %s \n %s"
-      .format(cause1.getMessage, cause2.getMessage)
+    "Double failure while disposing composite resource: %s \n %s".format(
+      cause1.getMessage,
+      cause2.getMessage)
 }
diff --git a/repos/util/util-core/src/main/scala/com/twitter/util/Timer.scala b/repos/util/util-core/src/main/scala/com/twitter/util/Timer.scala
index 15eac91649f..a9e1b4d5ccf 100644
--- a/repos/util/util-core/src/main/scala/com/twitter/util/Timer.scala
+++ b/repos/util/util-core/src/main/scala/com/twitter/util/Timer.scala
@@ -292,8 +292,9 @@ class ScheduledThreadPoolTimer(
 
   protected def scheduleOnce(when: Time)(f: => Unit): TimerTask = {
     val runnable = toRunnable(f)
-    val javaFuture = underlying
-      .schedule(runnable, when.sinceNow.inMillis, TimeUnit.MILLISECONDS)
+    val javaFuture = underlying.schedule(runnable,
+                                         when.sinceNow.inMillis,
+                                         TimeUnit.MILLISECONDS)
     new TimerTask {
       def cancel(): Unit = {
         javaFuture.cancel(true)
diff --git a/repos/util/util-jvm/src/main/scala/com/twitter/jvm/Estimator.scala b/repos/util/util-jvm/src/main/scala/com/twitter/jvm/Estimator.scala
index b209d0d27b9..ac9d0c8b28a 100644
--- a/repos/util/util-jvm/src/main/scala/com/twitter/jvm/Estimator.scala
+++ b/repos/util/util-jvm/src/main/scala/com/twitter/jvm/Estimator.scala
@@ -67,8 +67,10 @@ class Kalman(N: Int) {
   }
 
   override def toString =
-    "Kalman"
-      .format(estimate, weight, mvar, evar)
+    "Kalman".format(estimate,
+                    weight,
+                    mvar,
+                    evar)
 }
 
 /**
diff --git a/repos/util/util-jvm/src/main/scala/com/twitter/jvm/Jvm.scala b/repos/util/util-jvm/src/main/scala/com/twitter/jvm/Jvm.scala
index 0ec89b9588f..b04ce4171ba 100644
--- a/repos/util/util-jvm/src/main/scala/com/twitter/jvm/Jvm.scala
+++ b/repos/util/util-jvm/src/main/scala/com/twitter/jvm/Jvm.scala
@@ -56,8 +56,10 @@ case class PoolState(numCollections: Long,
   )
 
   override def toString =
-    "PoolState(n=%d,remaining=%s[%s of %s])"
-      .format(numCollections, capacity - used, used, capacity)
+    "PoolState(n=%d,remaining=%s[%s of %s])".format(numCollections,
+                                                    capacity - used,
+                                                    used,
+                                                    capacity)
 }
 
 /**
diff --git a/repos/util/util-logging/src/main/scala/com/twitter/logging/Formatter.scala b/repos/util/util-logging/src/main/scala/com/twitter/logging/Formatter.scala
index c3901f4bc85..cda8374419a 100644
--- a/repos/util/util-logging/src/main/scala/com/twitter/logging/Formatter.scala
+++ b/repos/util/util-logging/src/main/scala/com/twitter/logging/Formatter.scala
@@ -198,8 +198,9 @@ class Formatter(val timezone: Option[String] = None,
     val prefix = formatPrefix(record.getLevel,
                               dateFormat.format(new Date(record.getMillis)),
                               name)
-    formatMessageLines(record)
-      .mkString(prefix, lineTerminator + prefix, lineTerminator)
+    formatMessageLines(record).mkString(prefix,
+                                        lineTerminator + prefix,
+                                        lineTerminator)
   }
 
   /**
diff --git a/repos/util/util-logging/src/main/scala/com/twitter/logging/Handler.scala b/repos/util/util-logging/src/main/scala/com/twitter/logging/Handler.scala
index 32cc366ca2b..49ee8ccd2ed 100644
--- a/repos/util/util-logging/src/main/scala/com/twitter/logging/Handler.scala
+++ b/repos/util/util-logging/src/main/scala/com/twitter/logging/Handler.scala
@@ -31,8 +31,9 @@ abstract class Handler(val formatter: Formatter, val level: Option[Level])
   }
 
   override def toString = {
-    "<%s level=%s formatter=%s>"
-      .format(getClass.getName, getLevel, formatter.toString)
+    "<%s level=%s formatter=%s>".format(getClass.getName,
+                                        getLevel,
+                                        formatter.toString)
   }
 }
diff --git a/repos/util/util-logging/src/main/scala/com/twitter/logging/SyslogHandler.scala b/repos/util/util-logging/src/main/scala/com/twitter/logging/SyslogHandler.scala
index 5677b7ea85a..5ba2ed5c537 100644
--- a/repos/util/util-logging/src/main/scala/com/twitter/logging/SyslogHandler.scala
+++ b/repos/util/util-logging/src/main/scala/com/twitter/logging/SyslogHandler.scala
@@ -168,8 +168,11 @@ class SyslogFormatter(val hostname: String = NetUtil.getLocalHostName(),
       case None =>
         "<%d>%s %s %s: ".format(priority | syslogLevel, date, hostname, name)
       case Some(serverName) =>
-        "<%d>%s %s [%s] %s: "
-          .format(priority | syslogLevel, date, hostname, serverName, name)
+        "<%d>%s %s [%s] %s: ".format(priority | syslogLevel,
+                                     date,
+                                     hostname,
+                                     serverName,
+                                     name)
     }
   }
 }
diff --git a/repos/util/util-registry/src/test/scala/com/twitter/util/registry/FormatterTest.scala b/repos/util/util-registry/src/test/scala/com/twitter/util/registry/FormatterTest.scala
index 600a82b2439..183eed7bd44 100644
--- a/repos/util/util-registry/src/test/scala/com/twitter/util/registry/FormatterTest.scala
+++ b/repos/util/util-registry/src/test/scala/com/twitter/util/registry/FormatterTest.scala
@@ -40,8 +40,9 @@ class FormatterTest extends FunSuite {
   }
 
   test("add should handle colliding prefixes") {
-    val actual = Formatter
-      .add(Map("it's" -> Map("not" -> "small")), Seq("it's", "very"), "big")
+    val actual = Formatter.add(Map("it's" -> Map("not" -> "small")),
+                               Seq("it's", "very"),
+                               "big")
     val expected = Map("it's" -> Map("very" -> "big", "not" -> "small"))
     assert(actual == expected)
   }
diff --git a/repos/util/util-stats/src/test/scala/com/twitter/finagle/stats/StatsReceiverTest.scala b/repos/util/util-stats/src/test/scala/com/twitter/finagle/stats/StatsReceiverTest.scala
index d6bbc8bf6cd..7d5fec7f214 100644
--- a/repos/util/util-stats/src/test/scala/com/twitter/finagle/stats/StatsReceiverTest.scala
+++ b/repos/util/util-stats/src/test/scala/com/twitter/finagle/stats/StatsReceiverTest.scala
@@ -80,10 +80,11 @@ class StatsReceiverTest extends FunSuite {
                 1.second)
     verify(receiver, times(1)).stat("2", "chainz")
 
-    Await
-      .ready(Stat.timeFuture(receiver.stat("2", "chainz"), TimeUnit.MINUTES) {
+    Await.ready(
+      Stat.timeFuture(receiver.stat("2", "chainz"), TimeUnit.MINUTES) {
         Future.Unit
-      }, 1.second)
+      },
+      1.second)
     verify(receiver, times(2)).stat("2", "chainz")
 
     val stat = receiver.stat("2", "chainz")
diff --git a/repos/util/util-zk-common/src/main/scala/com/twitter/zk/ServerSet.scala b/repos/util/util-zk-common/src/main/scala/com/twitter/zk/ServerSet.scala
index 6797c68b762..47fc30ce278 100644
--- a/repos/util/util-zk-common/src/main/scala/com/twitter/zk/ServerSet.scala
+++ b/repos/util/util-zk-common/src/main/scala/com/twitter/zk/ServerSet.scala
@@ -30,8 +30,7 @@ class ServerSet(val underlying: ServerSetImpl,
       additionalEndpoints: Map[String, InetSocketAddress] = Map.empty,
       status: CommonStatus = CommonStatus.ALIVE): Future[EndpointStatus] =
     pool {
-      underlying
-        .join(serviceEndpoint, additionalEndpoints.asJava, status) // blocks
+      underlying.join(serviceEndpoint, additionalEndpoints.asJava, status) // blocks
     } map { new EndpointStatus(_, pool) } // wrap for async updates
 
   /**
diff --git a/repos/util/util-zk/src/main/scala/com/twitter/zk/ZNode.scala b/repos/util/util-zk/src/main/scala/com/twitter/zk/ZNode.scala
index 6a34ec1b71e..cde6debd33a 100644
--- a/repos/util/util-zk/src/main/scala/com/twitter/zk/ZNode.scala
+++ b/repos/util/util-zk/src/main/scala/com/twitter/zk/ZNode.scala
@@ -237,8 +237,9 @@ trait ZNode {
           ZNode.TreeUpdate(zparent,
                            added = children -- knownChildren,
                            removed = knownChildren -- children)
-        log
-          .debug("updating %s with %d children", path, treeUpdate.added.size)
+        log.debug("updating %s with %d children",
+                  path,
+                  treeUpdate.added.size)
         broker send (treeUpdate) sync () onSuccess { _ =>
           log.debug("updated %s with %d children", path,