Skip to content

Commit

Permalink
Update Iceberg to 1.7.0
Browse files — browse the repository at this point in the history
  • Loading branch information
ebyhr committed Nov 10, 2024
1 parent 4b90c99 commit ead6d9f
Show file tree
Hide file tree
Showing 9 changed files with 18 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,8 @@ public static Object getValue(JsonNode partitionValue, Type type)
return rescale(
partitionValue.decimalValue(),
createDecimalType(decimalType.precision(), decimalType.scale()));
// TODO https://github.com/trinodb/trino/issues/19753 Support Iceberg timestamp types with nanosecond precision
case TIMESTAMP_NANO:
case LIST:
case MAP:
case STRUCT:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,9 @@ public static Type toTrinoType(org.apache.iceberg.types.Type type, TypeManager t
return TIME_MICROS;
case TIMESTAMP:
return ((Types.TimestampType) type).shouldAdjustToUTC() ? TIMESTAMP_TZ_MICROS : TIMESTAMP_MICROS;
case TIMESTAMP_NANO:
// TODO https://github.com/trinodb/trino/issues/19753 Support Iceberg timestamp types with nanosecond precision
break;
case STRING:
return VarcharType.createUnboundedVarcharType();
case UUID:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ public synchronized TrinoCatalog create(ConnectorIdentity identity)
properties.put(CatalogProperties.URI, serverUri.toString());
warehouse.ifPresent(location -> properties.put(CatalogProperties.WAREHOUSE_LOCATION, location));
prefix.ifPresent(prefix -> properties.put("prefix", prefix));
properties.put("view-endpoints-supported", "true");
properties.put("trino-version", trinoVersion);
properties.putAll(securityProperties.get());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,12 @@
package io.trino.plugin.iceberg.util;

import io.trino.metastore.type.TypeInfo;
import io.trino.spi.TrinoException;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types.DecimalType;

import static io.trino.metastore.type.TypeInfoUtils.getTypeInfoFromTypeString;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static java.util.stream.Collectors.joining;

// based on org.apache.iceberg.hive.HiveSchemaUtil
Expand All @@ -41,6 +43,8 @@ private static String convertToTypeString(Type type)
case DATE -> "date";
case TIME, STRING, UUID -> "string";
case TIMESTAMP -> "timestamp";
// TODO https://github.com/trinodb/trino/issues/19753 Support Iceberg timestamp types with nanosecond precision
case TIMESTAMP_NANO -> throw new TrinoException(NOT_SUPPORTED, "Unsupported Iceberg type: TIMESTAMP_NANO");
case FIXED, BINARY -> "binary";
case DECIMAL -> "decimal(%s,%s)".formatted(((DecimalType) type).precision(), ((DecimalType) type).scale());
case LIST -> "array<%s>".formatted(convert(type.asListType().elementType()));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import io.trino.orc.metadata.OrcColumnId;
import io.trino.orc.metadata.OrcType;
import io.trino.orc.metadata.OrcType.OrcTypeKind;
import io.trino.spi.TrinoException;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types.DecimalType;
Expand All @@ -33,6 +34,8 @@
import java.util.Map;
import java.util.Optional;

import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;

public final class OrcTypeConverter
{
public static final String ORC_ICEBERG_ID_KEY = "iceberg.id";
Expand Down Expand Up @@ -67,6 +70,8 @@ private static List<OrcType> toOrcType(int nextFieldTypeIndex, Type type, Map<St
OrcTypeKind timestampKind = ((TimestampType) type).shouldAdjustToUTC() ? OrcTypeKind.TIMESTAMP_INSTANT : OrcTypeKind.TIMESTAMP;
yield ImmutableList.of(new OrcType(timestampKind, ImmutableList.of(), ImmutableList.of(), Optional.empty(), Optional.empty(), Optional.empty(), attributes));
}
// TODO https://github.com/trinodb/trino/issues/19753 Support Iceberg timestamp types with nanosecond precision
case TIMESTAMP_NANO -> throw new TrinoException(NOT_SUPPORTED, "Unsupported Iceberg type: TIMESTAMP_NANO");
case STRING -> ImmutableList.of(new OrcType(OrcTypeKind.STRING, ImmutableList.of(), ImmutableList.of(), Optional.empty(), Optional.empty(), Optional.empty(), attributes));
case FIXED, BINARY -> ImmutableList.of(new OrcType(OrcTypeKind.BINARY, ImmutableList.of(), ImmutableList.of(), Optional.empty(), Optional.empty(), Optional.empty(), attributes));
case DECIMAL -> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1062,7 +1062,7 @@ public void testCreateTableWithUnsupportedNestedFieldPartitioning()
"\\QUnable to parse partitioning value: Cannot partition by non-primitive source field: struct<3: child: optional string>");
assertQueryFails(
"CREATE TABLE test_partitioned_table_nested_field_inside_array (parent ARRAY(ROW(child VARCHAR))) WITH (partitioning = ARRAY['\"parent.child\"'])",
"\\QPartitioning field [parent.child] cannot be contained in a array");
"\\QPartitioning field [parent.element.child] cannot be contained in a array");
assertQueryFails(
"CREATE TABLE test_partitioned_table_nested_field_inside_map (parent MAP(ROW(child INTEGER), ARRAY(VARCHAR))) WITH (partitioning = ARRAY['\"parent.key.child\"'])",
"\\QPartitioning field [parent.key.child] cannot be contained in a map");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,8 +124,6 @@ public void testCreateTable()
assertFileSystemAccesses("CREATE TABLE test_create (id VARCHAR, age INT)",
ImmutableMultiset.<FileOperation>builder()
.add(new FileOperation(METADATA_JSON, "OutputFile.create"))
.add(new FileOperation(SNAPSHOT, "InputFile.length"))
.add(new FileOperation(SNAPSHOT, "InputFile.newStream"))
.add(new FileOperation(SNAPSHOT, "OutputFile.create"))
.build());
}
Expand All @@ -136,16 +134,12 @@ public void testCreateOrReplaceTable()
assertFileSystemAccesses("CREATE OR REPLACE TABLE test_create_or_replace (id VARCHAR, age INT)",
ImmutableMultiset.<FileOperation>builder()
.add(new FileOperation(METADATA_JSON, "OutputFile.create"))
.add(new FileOperation(SNAPSHOT, "InputFile.length"))
.add(new FileOperation(SNAPSHOT, "InputFile.newStream"))
.add(new FileOperation(SNAPSHOT, "OutputFile.create"))
.build());
assertFileSystemAccesses("CREATE OR REPLACE TABLE test_create_or_replace (id VARCHAR, age INT)",
ImmutableMultiset.<FileOperation>builder()
.add(new FileOperation(METADATA_JSON, "OutputFile.create"))
.add(new FileOperation(METADATA_JSON, "InputFile.newStream"))
.add(new FileOperation(SNAPSHOT, "InputFile.length"))
.add(new FileOperation(SNAPSHOT, "InputFile.newStream"))
.add(new FileOperation(SNAPSHOT, "OutputFile.create"))
.build());
}
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@
<dep.frontend-node.version>v20.17.0</dep.frontend-node.version>
<dep.frontend-npm.version>10.8.3</dep.frontend-npm.version>
<dep.google.http.client.version>1.45.0</dep.google.http.client.version>
<dep.iceberg.version>1.6.1</dep.iceberg.version>
<dep.iceberg.version>1.7.0</dep.iceberg.version>
<dep.jna.version>5.15.0</dep.jna.version>
<dep.joda.version>2.12.7</dep.joda.version>
<dep.jsonwebtoken.version>0.12.6</dep.jsonwebtoken.version>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ public class EnvSinglenodeSparkIcebergRest
private static final int REST_SERVER_PORT = 8181;
private static final String SPARK_CONTAINER_NAME = "spark";
private static final String REST_CONTAINER_NAME = "iceberg-with-rest";
private static final String REST_SERVER_IMAGE = "tabulario/iceberg-rest:0.4.0";
private static final String REST_SERVER_IMAGE = "tabulario/iceberg-rest:1.5.0";
private static final String CATALOG_WAREHOUSE = "hdfs://hadoop-master:9000/user/hive/warehouse";

private final DockerFiles dockerFiles;
Expand Down

0 comments on commit ead6d9f

Please sign in to comment.