From 8dc4afeadff4fd4047ddf541190a78bd6cf99f06 Mon Sep 17 00:00:00 2001
From: "Michael G. Noll"
Date: Mon, 29 Sep 2014 16:28:29 +0200
Subject: [PATCH] Clarify comment on parallelism

---
 .../com/miguno/kafkastorm/spark/KafkaSparkStreamingSpec.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/test/scala/com/miguno/kafkastorm/spark/KafkaSparkStreamingSpec.scala b/src/test/scala/com/miguno/kafkastorm/spark/KafkaSparkStreamingSpec.scala
index 2814926..785030d 100644
--- a/src/test/scala/com/miguno/kafkastorm/spark/KafkaSparkStreamingSpec.scala
+++ b/src/test/scala/com/miguno/kafkastorm/spark/KafkaSparkStreamingSpec.scala
@@ -180,7 +180,7 @@ class KafkaSparkStreamingSpec extends FeatureSpec with Matchers with BeforeAndAf
         ).map(_._2)
       }
       val unifiedStream = ssc.union(streams) // Merge the "per-partition" DStreams
-      val sparkConsumerParallelism = 1 // You'd probably pick a much higher value than 1 in production.
+      val sparkConsumerParallelism = 1 // You'd probably pick a higher value than 1 in production.
       unifiedStream.repartition(sparkConsumerParallelism) // Decouple processing parallelism from #partitions
     }