Skip to content

Commit b107ae8

Browse files
ArnavBalyan authored and rahulketch committed
GH-3310: Clean up JIRA references and move to GH issues (#3309)
1 parent d9648f4 commit b107ae8

File tree

10 files changed

+13
-13
lines changed

10 files changed

+13
-13
lines changed

parquet-column/src/main/java/org/apache/parquet/CorruptStatistics.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727
import org.slf4j.LoggerFactory;
2828

2929
/**
30-
* There was a bug (PARQUET-251) that caused the statistics metadata
30+
* There was a bug (https://github.com/apache/parquet-java/issues/1433) that caused the statistics metadata
3131
* for binary columns to be corrupted in the write path.
3232
* <p>
3333
* This class is used to detect whether a file was written with this bug,
@@ -38,7 +38,7 @@ public class CorruptStatistics {
3838

3939
private static final Logger LOG = LoggerFactory.getLogger(CorruptStatistics.class);
4040

41-
// the version in which the bug described by jira: PARQUET-251 was fixed
41+
// the version in which the bug described by https://github.com/apache/parquet-java/issues/1433 was fixed
4242
// the bug involved writing invalid binary statistics, so stats written prior to this
4343
// fix must be ignored / assumed invalid
4444
private static final SemanticVersion PARQUET_251_FIXED_VERSION = new SemanticVersion(1, 8, 0);

parquet-column/src/main/java/org/apache/parquet/filter2/predicate/FilterApi.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -59,13 +59,13 @@
5959
* FilterPredicate pred = or(eq(foo, 10), ltEq(bar, 17.0));
6060
* </pre>
6161
*/
62-
// TODO: Support repeated columns (https://issues.apache.org/jira/browse/PARQUET-34)
62+
// TODO: Support repeated columns (https://github.com/apache/parquet-java/issues/1452)
6363
//
6464
// TODO: Support filtering on groups (eg, filter where this group is / isn't null)
65-
// TODO: (https://issues.apache.org/jira/browse/PARQUET-43)
65+
// TODO: (https://github.com/apache/parquet-format/issues/261)
6666

6767
// TODO: Consider adding support for more column types that aren't coupled with parquet types, eg Column<String>
68-
// TODO: (https://issues.apache.org/jira/browse/PARQUET-35)
68+
// TODO: (https://github.com/apache/parquet-java/issues/1453)
6969
public final class FilterApi {
7070
private FilterApi() {}
7171

parquet-column/src/main/java/org/apache/parquet/filter2/predicate/SchemaCompatibilityValidator.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@
5454
* <p>
5555
* TODO: detect if a column is optional or required and validate that eq(null)
5656
* TODO: is not called on required fields (is that too strict?)
57-
* TODO: (https://issues.apache.org/jira/browse/PARQUET-44)
57+
* TODO: (https://github.com/apache/parquet-java/issues/1472)
5858
*/
5959
public class SchemaCompatibilityValidator implements FilterPredicate.Visitor<Void> {
6060

parquet-column/src/main/java/org/apache/parquet/filter2/predicate/ValidTypeMap.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@
3434
* when there are type mismatches.
3535
* <p>
3636
* TODO: this has some overlap with {@link PrimitiveTypeName#javaType}
37-
* TODO: (https://issues.apache.org/jira/browse/PARQUET-30)
37+
* TODO: (https://github.com/apache/parquet-java/issues/1447)
3838
*/
3939
public class ValidTypeMap {
4040
private ValidTypeMap() {}

parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringPrimitiveConverter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ public FilteringPrimitiveConverter(PrimitiveConverter delegate, ValueInspector[]
4242
// TODO: this works, but
4343
// TODO: essentially turns off the benefits of dictionary support
4444
// TODO: even if the underlying delegate supports it.
45-
// TODO: we should support it here. (https://issues.apache.org/jira/browse/PARQUET-36)
45+
// TODO: we should support it here. (https://github.com/apache/parquet-java/issues/1392)
4646
@Override
4747
public boolean hasDictionarySupport() {
4848
return false;

parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateEvaluator.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@
3030
* represent columns with a null value, and updates them accordingly.
3131
* <p>
3232
* TODO: We could also build an evaluator that detects if enough values are known to determine the outcome
33-
* TODO: of the predicate and quit the record assembly early. (https://issues.apache.org/jira/browse/PARQUET-37)
33+
* TODO: of the predicate and quit the record assembly early. (https://github.com/apache/parquet-java/issues/1455)
3434
*/
3535
public class IncrementallyUpdatedFilterPredicateEvaluator implements Visitor {
3636
private static final IncrementallyUpdatedFilterPredicateEvaluator INSTANCE =

parquet-hadoop/src/main/java/org/apache/parquet/filter2/statisticslevel/StatisticsFilter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@
6363
* false otherwise (including when it is not known, which is often the case).
6464
*/
6565
// TODO: this belongs in the parquet-column project, but some of the classes here need to be moved too
66-
// TODO: (https://issues.apache.org/jira/browse/PARQUET-38)
66+
// TODO: (https://github.com/apache/parquet-java/issues/1458)
6767
public class StatisticsFilter implements FilterPredicate.Visitor<Boolean> {
6868

6969
private static final boolean BLOCK_MIGHT_MATCH = false;

parquet-hadoop/src/main/java/org/apache/parquet/format/converter/ParquetMetadataConverter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@
142142
import org.slf4j.LoggerFactory;
143143

144144
// TODO: This file has become too long!
145-
// TODO: Lets split it up: https://issues.apache.org/jira/browse/PARQUET-310
145+
// TODO: Lets split it up: https://github.com/apache/parquet-java/issues/1835
146146
public class ParquetMetadataConverter {
147147

148148
private static final TypeDefinedOrder TYPE_DEFINED_ORDER = new TypeDefinedOrder();

parquet-hadoop/src/main/java/org/apache/parquet/hadoop/CodecFactory.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -171,7 +171,7 @@ public BytesInput decompress(BytesInput bytes, int decompressedSize) throws IOEx
171171
InputStream is = codec.createInputStream(bytes.toInputStream(), decompressor);
172172

173173
// We need to explicitly close the ZstdDecompressorStream here to release the resources it holds to
174-
// avoid off-heap memory fragmentation issue, see https://issues.apache.org/jira/browse/PARQUET-2160.
174+
// avoid off-heap memory fragmentation issue, see https://github.com/apache/parquet-format/issues/398.
175175
// This change will load the decompressor stream into heap a little earlier, since the problem it solves
176176
// only happens in the ZSTD codec, so this modification is only made for ZSTD streams.
177177
if (codec instanceof ZstandardCodec) {

parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetReader.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@
4949

5050
/**
5151
* Read records from a Parquet file.
52-
* TODO: too many constructors (https://issues.apache.org/jira/browse/PARQUET-39)
52+
* TODO: too many constructors (https://github.com/apache/parquet-java/issues/1466)
5353
*/
5454
public class ParquetReader<T> implements Closeable {
5555

0 commit comments

Comments (0)