Skip to content

Commit 7d44d7f

Browse files
Fix typos, remove unnecessary braces, add editorconfig
1 parent 6ca13f4 commit 7d44d7f

File tree

15 files changed

+40
-24
lines changed

15 files changed

+40
-24
lines changed

.editorconfig

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
# top-most EditorConfig file
2+
root = true
3+
4+
[*]
5+
charset = utf-8
6+
end_of_line = lf
7+
trim_trailing_whitespace = true
8+
9+
[*.xml]
10+
indent_size = 4
11+
indent_style = space
12+
insert_final_newline = true
13+
14+
[*.{java,scala,js,json,css}]
15+
indent_size = 2
16+
indent_style = space
17+
insert_final_newline = true
18+
max_line_length = 120
19+
20+
[*.md]
21+
trim_trailing_whitespace = false

component-scanner/src/test/scala/za/co/absa/hyperdrive/scanner/JarTestUtils.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@ import java.util.jar.{Attributes, JarEntry, JarOutputStream, Manifest}
2222

2323
object JarTestUtils {
2424

25+
val BUFFER_SIZE = 1024
26+
2527
def createJar(baseDir: File, jarName: String, filenames: List[String]): File = {
2628
val content = filenames.map(filename => new File(getClass.getClassLoader.getResource(filename).toURI) -> filename).toMap
2729
JarTestUtils.createJar(baseDir, jarName, content)
@@ -44,7 +46,7 @@ object JarTestUtils {
4446
private def addEntries(destJarFile: File, manifest: Manifest, content: Map[File, String]): Unit = {
4547
val outputJar = new JarOutputStream(new FileOutputStream(destJarFile.getAbsolutePath), manifest)
4648
content.foreach(entry => add(entry._1, entry._2, outputJar))
47-
outputJar.close();
49+
outputJar.close()
4850
}
4951

5052
@throws[IOException]
@@ -59,7 +61,7 @@ object JarTestUtils {
5961

6062
val in = new BufferedInputStream(new FileInputStream(source))
6163

62-
val buffer = new Array[Byte](1024)
64+
val buffer = new Array[Byte](BUFFER_SIZE)
6365

6466
var count = in.read(buffer)
6567
while (count != -1) {

component-scanner/src/test/scala/za/co/absa/hyperdrive/scanner/TestObjectScanner.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
package za.co.absa.hyperdrive.scanner
1919

2020
import java.io.File
21-
import java.nio.file.{Files, Path}
21+
import java.nio.file.Files
2222

2323
import org.scalatest.{FlatSpec, Matchers}
2424
import za.co.absa.hyperdrive.ingestor.api.decoder.StreamDecoderFactory

component-scanner/src/test/scala/za/co/absa/hyperdrive/scanner/dummyjar/AbstractDummyStreamReaderFactory.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,4 +19,4 @@ package za.co.absa.hyperdrive.scanner.dummyjar
1919

2020
import za.co.absa.hyperdrive.ingestor.api.reader.StreamReaderFactory
2121

22-
abstract class AbstractDummyStreamReaderFactory extends StreamReaderFactory {}
22+
abstract class AbstractDummyStreamReaderFactory extends StreamReaderFactory

component-scanner/src/test/scala/za/co/absa/hyperdrive/scanner/dummyjar/DummyStreamReaderOne.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package za.co.absa.hyperdrive.scanner.dummyjar
2020
import org.apache.commons.configuration2.Configuration
2121
import org.apache.spark.sql.SparkSession
2222
import org.apache.spark.sql.streaming.DataStreamReader
23-
import za.co.absa.hyperdrive.ingestor.api.reader.{StreamReader, StreamReaderFactory}
23+
import za.co.absa.hyperdrive.ingestor.api.reader.StreamReader
2424

2525
class DummyStreamReaderOne extends DummyTrait {
2626
override def read(spark: SparkSession): DataStreamReader = ???

component-scanner/src/test/scala/za/co/absa/hyperdrive/scanner/dummyjar/DummyStreamReaderTwo.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package za.co.absa.hyperdrive.scanner.dummyjar
2020
import org.apache.commons.configuration2.Configuration
2121
import org.apache.spark.sql.SparkSession
2222
import org.apache.spark.sql.streaming.DataStreamReader
23-
import za.co.absa.hyperdrive.ingestor.api.reader.{StreamReader, StreamReaderFactory}
23+
import za.co.absa.hyperdrive.ingestor.api.reader.StreamReader
2424

2525
class DummyStreamReaderTwo extends DummyTrait {
2626
override def read(spark: SparkSession): DataStreamReader = ???

component-scanner/src/test/scala/za/co/absa/hyperdrive/scanner/dummyjar/DummyTrait.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,4 @@ package za.co.absa.hyperdrive.scanner.dummyjar
1919

2020
import za.co.absa.hyperdrive.ingestor.api.reader.StreamReader
2121

22-
trait DummyTrait extends StreamReader {
23-
24-
}
22+
trait DummyTrait extends StreamReader

driver/src/main/scala/za/co/absa/hyperdrive/driver/SparkIngestor.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,7 @@ object SparkIngestor {
8585
ingestionQuery.stop()
8686
} catch {
8787
case NonFatal(e) =>
88-
throw new IngestionException(message = s"PROBABLY FAILED INGESTION $ingestionId. The was no error in the query plan, but something when wrong. " +
88+
throw new IngestionException(message = s"PROBABLY FAILED INGESTION $ingestionId. There was no error in the query plan, but something went wrong. " +
8989
s"Pay attention to this exception since the query has been started, which might lead to duplicate data or similar issues. " +
9090
s"The logs should have enough detail, but a possible course of action is to replay this ingestion and overwrite the destination.", e)
9191
}

driver/src/main/scala/za/co/absa/hyperdrive/driver/drivers/PropertiesIngestionDriver.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ object PropertiesIngestionDriver extends IngestionDriver {
3535
def main(args: Array[String]): Unit = {
3636
val propertiesFile = getPropertiesFilePath(args)
3737
if (propertiesFile.isEmpty) {
38-
throw new IllegalArgumentException("No properties file informed.")
38+
throw new IllegalArgumentException("No properties file supplied.")
3939
}
4040

4141
if (isInvalid(propertiesFile.get)) {

ingestor-default/src/main/scala/za/co/absa/hyperdrive/ingestor/implementation/reader/kafka/KafkaStreamReader.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,7 @@ object KafkaStreamReader extends StreamReaderFactory {
140140
}
141141
}
142142

143-
private def findMissingKeys(keys: Seq[String], map: Map[String,String]): Seq[String] = keys.filterNot(map.contains(_))
143+
private def findMissingKeys(keys: Seq[String], map: Map[String,String]): Seq[String] = keys.filterNot(map.contains)
144144

145145
private def tweakKeyName(key: String): String = {
146146
key.replace(s"$rootComponentConfKey.", "") // remove the component root configuration key

shared/src/test/scala/za/co/absa/hyperdrive/shared/utils/classloaderutils/TestClassLoaderUtilsNoSingleton.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,4 @@
1717

1818
package za.co.absa.hyperdrive.shared.utils.classloaderutils
1919

20-
class TestClassLoaderUtilsNoSingleton {
21-
22-
}
20+
class TestClassLoaderUtilsNoSingleton

shared/src/test/scala/za/co/absa/hyperdrive/shared/utils/classloaderutils/TestClassLoaderUtilsNotInstanceOf.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,4 @@
1717

1818
package za.co.absa.hyperdrive.shared.utils.classloaderutils
1919

20-
object TestClassLoaderUtilsNotInstanceOf {
21-
22-
}
20+
object TestClassLoaderUtilsNotInstanceOf

shared/src/test/scala/za/co/absa/hyperdrive/shared/utils/classloaderutils/TestClassLoaderUtilsSingletonInstanceOf.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,4 @@
1717

1818
package za.co.absa.hyperdrive.shared.utils.classloaderutils
1919

20-
object TestClassLoaderUtilsSingletonInstanceOf extends TestClassLoaderUtilsTestTrait {
21-
22-
}
20+
object TestClassLoaderUtilsSingletonInstanceOf extends TestClassLoaderUtilsTestTrait

shared/src/test/scala/za/co/absa/hyperdrive/shared/utils/classloaderutils/TestClassLoaderUtilsTestTrait.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,4 @@
1717

1818
package za.co.absa.hyperdrive.shared.utils.classloaderutils
1919

20-
trait TestClassLoaderUtilsTestTrait {
21-
22-
}
20+
trait TestClassLoaderUtilsTestTrait

testutils/src/main/scala/za/co/absa/hyperdrive/testutils/SparkTestUtils.scala

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@ package za.co.absa.hyperdrive.testutils
1818

1919
import org.apache.spark.sql.types.{ArrayType, StructField, StructType}
2020

21+
import scala.annotation.tailrec
22+
2123
object SparkTestUtils {
2224

2325
def areAllFieldsNullable(schema: StructType): Boolean = {
@@ -40,6 +42,7 @@ object SparkTestUtils {
4042
}
4143
}
4244

45+
@tailrec
4346
private def isExpectedNullabilityForAll(arrayType: ArrayType, expectedNullability: Boolean): Boolean = {
4447
arrayType.elementType match {
4548
case dt: ArrayType => isExpectedNullabilityForAll(dt, expectedNullability)

0 commit comments

Comments
 (0)