Skip to content

Commit 87fcba7

Browse files
committed
lint
1 parent 97dba10 commit 87fcba7

File tree

2 files changed: +13 additions, −14 deletions

parquet-column/src/main/java/org/apache/parquet/io/ValidatingRecordConsumer.java

Lines changed: 7 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -48,6 +48,9 @@
4848
public class ValidatingRecordConsumer extends RecordConsumer {
4949
private static final Logger LOG = LoggerFactory.getLogger(ValidatingRecordConsumer.class);
5050

51+
private static final int UINT_8_MAX_VALUE = 255;
52+
private static final int UINT_16_MAX_VALUE = 65535;
53+
5154
private final RecordConsumer delegate;
5255

5356
private Deque<Type> types = new ArrayDeque<>();
@@ -265,16 +268,16 @@ public Optional<Void> visit(LogicalTypeAnnotation.IntLogicalTypeAnnotation intTy
265268
if (!intType.isSigned()) {
266269
switch (intType.getBitWidth()) {
267270
case 8:
268-
if (value < 0 || value > 255) {
271+
if (value < 0 || value > UINT_8_MAX_VALUE) {
269272
throw new InvalidRecordException("Value " + value
270-
+ " is out of range for UINT_8 (0-255) in field "
273+
+ " is out of range for UINT_8 (0-" + UINT_8_MAX_VALUE + ") in field "
271274
+ currentType.getName());
272275
}
273276
break;
274277
case 16:
275-
if (value < 0 || value > 65535) {
278+
if (value < 0 || value > UINT_16_MAX_VALUE) {
276279
throw new InvalidRecordException("Value " + value
277-
+ " is out of range for UINT_16 (0-65535) in field "
280+
+ " is out of range for UINT_16 (0-" + UINT_16_MAX_VALUE + ") in field "
278281
+ currentType.getName());
279282
}
280283
break;

parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestStrictUnsignedIntegerValidation.java

Lines changed: 6 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -19,11 +19,10 @@
1919
package org.apache.parquet.hadoop.example;
2020

2121
import static org.apache.parquet.schema.LogicalTypeAnnotation.intType;
22+
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY;
2223
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32;
2324
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64;
24-
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY;
2525
import static org.junit.Assert.assertThrows;
26-
import org.apache.parquet.io.api.Binary;
2726

2827
import java.io.File;
2928
import java.io.IOException;
@@ -32,6 +31,7 @@
3231
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
3332
import org.apache.parquet.hadoop.ParquetWriter;
3433
import org.apache.parquet.io.InvalidRecordException;
34+
import org.apache.parquet.io.api.Binary;
3535
import org.apache.parquet.schema.MessageType;
3636
import org.apache.parquet.schema.Types;
3737
import org.junit.Rule;
@@ -277,10 +277,8 @@ public void testValidationCanBeExplicitlyDisabled() throws IOException {
277277

278278
@Test
279279
public void testBasicValidation() throws IOException {
280-
MessageType schema = Types.buildMessage()
281-
.required(INT32)
282-
.named("int32_field")
283-
.named("test_schema");
280+
MessageType schema =
281+
Types.buildMessage().required(INT32).named("int32_field").named("test_schema");
284282

285283
File tempFile = new File(tempFolder.getRoot(), "basic_validation.parquet");
286284
Path outputPath = new Path(tempFile.getAbsolutePath());
@@ -295,10 +293,8 @@ public void testBasicValidation() throws IOException {
295293
Group validGroup = groupFactory.newGroup().append("int32_field", 42);
296294
writer.write(validGroup);
297295

298-
MessageType stringSchema = Types.buildMessage()
299-
.required(BINARY)
300-
.named("int32_field")
301-
.named("test_schema");
296+
MessageType stringSchema =
297+
Types.buildMessage().required(BINARY).named("int32_field").named("test_schema");
302298

303299
SimpleGroupFactory stringGroupFactory = new SimpleGroupFactory(stringSchema);
304300
Group invalidGroup = stringGroupFactory.newGroup().append("int32_field", Binary.fromString("not_an_int"));

0 commit comments

Comments (0)