
Commit 8535df7

younggyu chun authored and srowen committed
[MINOR] Fix typos in comments and replace an explicit type with <>
## What changes were proposed in this pull request?

This PR fixes typos in comments and replaces explicit type arguments with '<>' for Java 8+.

## How was this patch tested?

Manually tested.

Closes apache#25338 from younggyuchun/younggyu.

Authored-by: younggyu chun <[email protected]>
Signed-off-by: Sean Owen <[email protected]>
1 parent ef80c32 commit 8535df7
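
For readers unfamiliar with the diamond operator, here is a minimal standalone sketch of the pattern this patch applies (the class name and values are illustrative, not taken from the patch): explicit type arguments on a constructor call are redundant when the compiler can infer them from the target type.

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Illustrative example only, not part of the patch.
public class DiamondOperatorExample {
  public static void main(String[] args) {
    // Before: explicit type arguments duplicated on the constructor call.
    Set<String> before = Collections.synchronizedSet(new HashSet<String>());

    // After: the diamond operator lets the compiler infer <String> from the target type.
    Set<String> after = Collections.synchronizedSet(new HashSet<>());

    // Same idea for other generic collections.
    List<Integer> numbers = new ArrayList<>();
    numbers.add(42);

    System.out.println(before.equals(after)); // both empty -> true
    System.out.println(numbers);              // [42]
  }
}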

File tree

17 files changed: +27 -27 lines changed

appveyor.yml (+1 -1)

@@ -55,7 +55,7 @@ build_script:
 
 environment:
   NOT_CRAN: true
-  # See SPARK-27848. Currently installing some dependent packagess causes
+  # See SPARK-27848. Currently installing some dependent packages causes
   # "(converted from warning) unable to identify current timezone 'C':" for an unknown reason.
   # This environment variable works around to test SparkR against a higher version.
   R_REMOTES_NO_ERRORS_FROM_WARNINGS: true

common/network-common/src/test/java/org/apache/spark/network/ChunkFetchIntegrationSuite.java (+3 -3)

@@ -151,9 +151,9 @@ private FetchResult fetchChunks(List<Integer> chunkIndices) throws Exception {
         clientFactory.createClient(TestUtils.getLocalHost(), server.getPort())) {
       final Semaphore sem = new Semaphore(0);
 
-      res.successChunks = Collections.synchronizedSet(new HashSet<Integer>());
-      res.failedChunks = Collections.synchronizedSet(new HashSet<Integer>());
-      res.buffers = Collections.synchronizedList(new LinkedList<ManagedBuffer>());
+      res.successChunks = Collections.synchronizedSet(new HashSet<>());
+      res.failedChunks = Collections.synchronizedSet(new HashSet<>());
+      res.buffers = Collections.synchronizedList(new LinkedList<>());
 
       ChunkReceivedCallback callback = new ChunkReceivedCallback() {
         @Override

common/network-common/src/test/java/org/apache/spark/network/RpcIntegrationSuite.java (+4 -4)

@@ -175,8 +175,8 @@ private RpcResult sendRPC(String ... commands) throws Exception {
     final Semaphore sem = new Semaphore(0);
 
     final RpcResult res = new RpcResult();
-    res.successMessages = Collections.synchronizedSet(new HashSet<String>());
-    res.errorMessages = Collections.synchronizedSet(new HashSet<String>());
+    res.successMessages = Collections.synchronizedSet(new HashSet<>());
+    res.errorMessages = Collections.synchronizedSet(new HashSet<>());
 
     RpcResponseCallback callback = new RpcResponseCallback() {
       @Override

@@ -208,8 +208,8 @@ private RpcResult sendRpcWithStream(String... streams) throws Exception {
     TransportClient client = clientFactory.createClient(TestUtils.getLocalHost(), server.getPort());
     final Semaphore sem = new Semaphore(0);
     RpcResult res = new RpcResult();
-    res.successMessages = Collections.synchronizedSet(new HashSet<String>());
-    res.errorMessages = Collections.synchronizedSet(new HashSet<String>());
+    res.successMessages = Collections.synchronizedSet(new HashSet<>());
+    res.errorMessages = Collections.synchronizedSet(new HashSet<>());
 
     for (String stream : streams) {
       int idx = stream.lastIndexOf('/');

common/network-common/src/test/java/org/apache/spark/network/TransportClientFactorySuite.java (+1 -1)

@@ -84,7 +84,7 @@ private void testClientReuse(int maxConnections, boolean concurrent)
     try (TransportContext context = new TransportContext(conf, rpcHandler)) {
       TransportClientFactory factory = context.createClientFactory();
       Set<TransportClient> clients = Collections.synchronizedSet(
-          new HashSet<TransportClient>());
+          new HashSet<>());
 
       AtomicInteger failed = new AtomicInteger();
       Thread[] attempts = new Thread[maxConnections * 10];

common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleIntegrationSuite.java (+3 -3)

@@ -170,9 +170,9 @@ private FetchResult fetchBlocks(
       TransportConf clientConf,
       int port) throws Exception {
     final FetchResult res = new FetchResult();
-    res.successBlocks = Collections.synchronizedSet(new HashSet<String>());
-    res.failedBlocks = Collections.synchronizedSet(new HashSet<String>());
-    res.buffers = Collections.synchronizedList(new LinkedList<ManagedBuffer>());
+    res.successBlocks = Collections.synchronizedSet(new HashSet<>());
+    res.failedBlocks = Collections.synchronizedSet(new HashSet<>());
+    res.buffers = Collections.synchronizedList(new LinkedList<>());
 
     final Semaphore requestsRemaining = new Semaphore(0);
 

common/unsafe/src/test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java (+1 -1)

@@ -467,7 +467,7 @@ public void translate() {
       )));
     assertEquals(
       fromString("translate"),
-      fromString("translate").translate(new HashMap<Character, Character>()));
+      fromString("translate").translate(new HashMap<>()));
     assertEquals(
       fromString("asae"),
       fromString("translate").translate(ImmutableMap.of(

core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java (+2 -2)

@@ -533,7 +533,7 @@ public void testPeakMemoryUsed() throws Exception {
     long newPeakMemory;
     try {
       for (int i = 0; i < numRecordsPerPage * 10; i++) {
-        writer.insertRecordIntoSorter(new Tuple2<Object, Object>(1, 1));
+        writer.insertRecordIntoSorter(new Tuple2<>(1, 1));
        newPeakMemory = writer.getPeakMemoryUsedBytes();
        if (i % numRecordsPerPage == 0) {
          // The first page is allocated in constructor, another page will be allocated after

@@ -550,7 +550,7 @@ public void testPeakMemoryUsed() throws Exception {
      newPeakMemory = writer.getPeakMemoryUsedBytes();
      assertEquals(previousPeakMemory, newPeakMemory);
      for (int i = 0; i < numRecordsPerPage; i++) {
-        writer.insertRecordIntoSorter(new Tuple2<Object, Object>(1, 1));
+        writer.insertRecordIntoSorter(new Tuple2<>(1, 1));
      }
      newPeakMemory = writer.getPeakMemoryUsedBytes();
      assertEquals(previousPeakMemory, newPeakMemory);

core/src/test/scala/org/apache/spark/metrics/source/AccumulatorSourceSuite.scala (+1 -1)

@@ -66,7 +66,7 @@ class AccumulatorSourceSuite extends SparkFunSuite {
     assert(gauges.get("my-accumulator-2").getValue() == 456)
   }
 
-  test("the double accumulators value propety is checked when the gauge's value is requested") {
+  test("the double accumulators value property is checked when the gauge's value is requested") {
     val acc1 = new DoubleAccumulator()
     acc1.add(123.123)
     val acc2 = new DoubleAccumulator()

examples/src/main/java/org/apache/spark/examples/sql/streaming/JavaStructuredSessionization.java (+1 -1)

@@ -70,7 +70,7 @@ public static void main(String[] args) throws Exception {
       new FlatMapFunction<LineWithTimestamp, Event>() {
         @Override
         public Iterator<Event> call(LineWithTimestamp lineWithTimestamp) {
-          ArrayList<Event> eventList = new ArrayList<Event>();
+          ArrayList<Event> eventList = new ArrayList<>();
           for (String word : lineWithTimestamp.getLine().split(" ")) {
             eventList.add(new Event(word, lineWithTimestamp.getTimestamp()));
           }

external/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaConsumerStrategySuite.java (+1 -1)

@@ -42,7 +42,7 @@ public void testConsumerStrategyConstructors() {
     final Collection<TopicPartition> parts = Arrays.asList(tp1, tp2);
     final scala.collection.Iterable<TopicPartition> sParts =
       JavaConverters.collectionAsScalaIterableConverter(parts).asScala();
-    final Map<String, Object> kafkaParams = new HashMap<String, Object>();
+    final Map<String, Object> kafkaParams = new HashMap<>();
     kafkaParams.put("bootstrap.servers", "not used");
     final scala.collection.Map<String, Object> sKafkaParams =
       JavaConverters.mapAsScalaMapConverter(kafkaParams).asScala();

external/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaDirectKafkaStreamSuite.java (+1 -1)

@@ -152,7 +152,7 @@ public String call(ConsumerRecord<String, String> r) {
 
     JavaDStream<String> unifiedStream = stream1.union(stream2);
 
-    final Set<String> result = Collections.synchronizedSet(new HashSet<String>());
+    final Set<String> result = Collections.synchronizedSet(new HashSet<>());
     unifiedStream.foreachRDD(new VoidFunction<JavaRDD<String>>() {
       @Override
       public void call(JavaRDD<String> rdd) {

launcher/src/test/java/org/apache/spark/launcher/CommandBuilderUtilsSuite.java (+1 -1)

@@ -37,7 +37,7 @@ public void testValidOptionStrings() {
     testOpt(" a b c \\\\ ", Arrays.asList("a", "b", "c", "\\"));
 
     // Following tests ported from UtilsSuite.scala.
-    testOpt("", new ArrayList<String>());
+    testOpt("", new ArrayList<>());
     testOpt("a", Arrays.asList("a"));
     testOpt("aaa", Arrays.asList("aaa"));
     testOpt("a b c", Arrays.asList("a", "b", "c"));

mllib/src/test/java/org/apache/spark/ml/stat/JavaSummarizerSuite.java (+1 -1)

@@ -40,7 +40,7 @@ public class JavaSummarizerSuite extends SharedSparkSession {
   @Override
   public void setUp() throws IOException {
     super.setUp();
-    List<LabeledPoint> points = new ArrayList<LabeledPoint>();
+    List<LabeledPoint> points = new ArrayList<>();
     points.add(new LabeledPoint(0.0, Vectors.dense(1.0, 2.0)));
     points.add(new LabeledPoint(0.0, Vectors.dense(3.0, 4.0)));
 

mllib/src/test/java/org/apache/spark/mllib/regression/JavaRidgeRegressionSuite.java (+2 -2)

@@ -57,7 +57,7 @@ public void runRidgeRegressionUsingConstructor() {
     List<LabeledPoint> data = generateRidgeData(2 * numExamples, numFeatures, 10.0);
 
     JavaRDD<LabeledPoint> testRDD = jsc.parallelize(
-        new ArrayList<LabeledPoint>(data.subList(0, numExamples)));
+        new ArrayList<>(data.subList(0, numExamples)));
     List<LabeledPoint> validationData = data.subList(numExamples, 2 * numExamples);
 
     RidgeRegressionWithSGD ridgeSGDImpl = new RidgeRegressionWithSGD();

@@ -82,7 +82,7 @@ public void runRidgeRegressionUsingStaticMethods() {
     List<LabeledPoint> data = generateRidgeData(2 * numExamples, numFeatures, 10.0);
 
     JavaRDD<LabeledPoint> testRDD = jsc.parallelize(
-        new ArrayList<LabeledPoint>(data.subList(0, numExamples)));
+        new ArrayList<>(data.subList(0, numExamples)));
     List<LabeledPoint> validationData = data.subList(numExamples, 2 * numExamples);
 
     RidgeRegressionModel model = RidgeRegressionWithSGD.train(testRDD.rdd(), 200, 1.0, 0.0);

sql/core/src/main/scala/org/apache/spark/sql/execution/python/WindowInPandasExec.scala (+1 -1)

@@ -251,7 +251,7 @@ case class WindowInPandasExec(
     }
 
     // Setting the window bounds argOffset for each UDF. For UDFs with bounded window, argOffset
-    // for the UDF is (lowerBoundOffet, upperBoundOffset, inputOffset1, inputOffset2, ...)
+    // for the UDF is (lowerBoundOffset, upperBoundOffset, inputOffset1, inputOffset2, ...)
     // For UDFs with unbounded window, argOffset is (inputOffset1, inputOffset2, ...)
     pyFuncs.indices.foreach { exprIndex =>
       val frameIndex = expressionIndexToFrameIndex(exprIndex)

sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameReaderWriterSuite.java (+1 -1)

@@ -62,7 +62,7 @@ public void testFormatAPI() {
 
   @Test
   public void testOptionsAPI() {
-    HashMap<String, String> map = new HashMap<String, String>();
+    HashMap<String, String> map = new HashMap<>();
     map.put("e", "1");
     spark
       .read()

streaming/src/test/java/org/apache/spark/streaming/JavaMapWithStateSuite.java (+2 -2)

@@ -149,10 +149,10 @@ private <K, S, T> void testOperation(
         inputStream.map(x -> new Tuple2<>(x, 1))).mapWithState(mapWithStateSpec);
 
     List<Set<T>> collectedOutputs =
-        Collections.synchronizedList(new ArrayList<Set<T>>());
+        Collections.synchronizedList(new ArrayList<>());
     mapWithStateDStream.foreachRDD(rdd -> collectedOutputs.add(Sets.newHashSet(rdd.collect())));
     List<Set<Tuple2<K, S>>> collectedStateSnapshots =
-        Collections.synchronizedList(new ArrayList<Set<Tuple2<K, S>>>());
+        Collections.synchronizedList(new ArrayList<>());
     mapWithStateDStream.stateSnapshots().foreachRDD(rdd ->
       collectedStateSnapshots.add(Sets.newHashSet(rdd.collect())));
     BatchCounter batchCounter = new BatchCounter(ssc.ssc());
