diff --git a/.github/workflows/product-tests-specific-environment.yml b/.github/workflows/product-tests-specific-environment.yml
index cbc0dde9e0e4c..4a4718b1be495 100644
--- a/.github/workflows/product-tests-specific-environment.yml
+++ b/.github/workflows/product-tests-specific-environment.yml
@@ -71,7 +71,7 @@ jobs:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode -g hdfs_no_impersonation,avro,mixed_case
- name: Product Tests Specific 1.2
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode-kerberos-hdfs-no-impersonation -g hdfs_no_impersonation
@@ -79,17 +79,17 @@ jobs:
# - name: Product Tests Specific 1.3
# run: presto-product-tests/bin/run_on_docker.sh singlenode-hdfs-impersonation -g storage_formats,cli,hdfs_impersonation
- name: Product Tests Specific 1.4
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode-kerberos-hdfs-impersonation -g storage_formats,cli,hdfs_impersonation,authorization,hive_file_header
- name: Product Tests Specific 1.5
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode-kerberos-hdfs-impersonation-cross-realm -g storage_formats,cli,hdfs_impersonation
- name: Product Tests Specific 1.6
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh multinode-tls-kerberos -g cli,group-by,join,tls
@@ -138,22 +138,22 @@ jobs:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode-ldap -g ldap -x simba_jdbc
- name: Product Tests Specific 2.2
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh multinode-tls -g smoke,cli,group-by,join,tls
- name: Product Tests Specific 2.3
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode-mysql -g mysql_connector,mysql
- name: Product Tests Specific 2.4
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode-postgresql -g postgresql_connector
- name: Product Tests Specific 2.5
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode-cassandra -g cassandra
@@ -161,12 +161,12 @@ jobs:
# - name: Product Tests Specific 2.6
# run: presto-product-tests/bin/run_on_docker.sh singlenode-kerberos-hdfs-impersonation-with-wire-encryption -g storage_formats,cli,hdfs_impersonation,authorization
- name: Product Tests Specific 2.7
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode-kafka -g kafka
- name: Product Tests Specific 2.8
- if: needs.changes.outputs.codechange == 'true'
+ if: always() && needs.changes.outputs.codechange == 'true'
env:
OVERRIDE_JDK_DIR: ${{ env.JAVA_HOME }}
run: presto-product-tests/bin/run_on_docker.sh singlenode-sqlserver -g sqlserver
diff --git a/pom.xml b/pom.xml
index a24df5896face..682cea34999b5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -56,7 +56,7 @@
2.14.0
- 1.55
+ 1.57
7.5
9.12.0
3.8.0
@@ -1108,33 +1108,13 @@
com.facebook.presto.hive
hive-apache
- 3.0.0-12
-
-
-
- org.apache.hive
- hive-llap-common
- 2.3.4
-
-
- org.apache.hive
- hive-common
-
-
- org.apache.hive
- hive-serde
-
-
- org.slf4j
- slf4j-api
-
-
+ 4.0.1-1
com.facebook.presto.orc
orc-protobuf
- 13
+ 14
@@ -1232,7 +1212,7 @@
com.facebook.hive
hive-dwrf
- 0.8.7
+ 0.8.8
diff --git a/presto-base-arrow-flight/src/test/java/com/facebook/plugin/arrow/TestArrowFlightQueries.java b/presto-base-arrow-flight/src/test/java/com/facebook/plugin/arrow/TestArrowFlightQueries.java
index 2bdf8508b8a1b..3b49602beab9f 100644
--- a/presto-base-arrow-flight/src/test/java/com/facebook/plugin/arrow/TestArrowFlightQueries.java
+++ b/presto-base-arrow-flight/src/test/java/com/facebook/plugin/arrow/TestArrowFlightQueries.java
@@ -131,11 +131,12 @@ public void testSelectTime()
{
MaterializedResult actualRow = computeActual("SELECT * from event WHERE id = 1");
Session session = getSession();
+ TimeZoneKey timeZoneKey = session.getSqlFunctionProperties().isLegacyTimestamp() ? session.getTimeZoneKey() : TimeZoneKey.UTC_KEY;
MaterializedResult expectedRow = resultBuilder(session, INTEGER, DATE, TIME, TIMESTAMP)
.row(1,
getDate("2004-12-31"),
- getTimeAtZone("23:59:59", session.getTimeZoneKey()),
- getDateTimeAtZone("2005-12-31 23:59:59", session.getTimeZoneKey()))
+ getTimeAtZone("23:59:59", timeZoneKey),
+ getDateTimeAtZone("2005-12-31 23:59:59", timeZoneKey))
.build();
assertTrue(actualRow.equals(expectedRow));
}
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraMetadata.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraMetadata.java
index b4ea65a7ecb48..11e6285da2893 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraMetadata.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraMetadata.java
@@ -104,7 +104,7 @@ public CassandraTableHandle getTableHandle(ConnectorSession session, SchemaTable
{
requireNonNull(tableName, "tableName is null");
try {
- return cassandraSession.getTable(tableName).getTableHandle();
+ return cassandraSession.getTable(session, tableName).getTableHandle();
}
catch (TableNotFoundException | SchemaNotFoundException e) {
// table was not found
@@ -126,7 +126,7 @@ public ConnectorTableMetadata getTableMetadata(ConnectorSession session, Connect
private ConnectorTableMetadata getTableMetadata(ConnectorSession session, SchemaTableName tableName)
{
- CassandraTable table = cassandraSession.getTable(tableName);
+ CassandraTable table = cassandraSession.getTable(session, tableName);
List columns = table.getColumns().stream()
.map(column -> column.getColumnMetadata(normalizeIdentifier(session, cqlNameToSqlName(column.getName()))))
.collect(toImmutableList());
@@ -164,7 +164,7 @@ public Map getColumnHandles(ConnectorSession session, Conn
{
requireNonNull(session, "session is null");
requireNonNull(tableHandle, "tableHandle is null");
- CassandraTable table = cassandraSession.getTable(getTableName(tableHandle));
+ CassandraTable table = cassandraSession.getTable(session, getTableName(tableHandle));
ImmutableMap.Builder columnHandles = ImmutableMap.builder();
for (CassandraColumnHandle columnHandle : table.getColumns()) {
String columnName = cqlNameToSqlName(columnHandle.getName());
@@ -212,7 +212,7 @@ public ConnectorTableLayoutResult getTableLayoutForConstraint(
Optional> desiredColumns)
{
CassandraTableHandle handle = (CassandraTableHandle) table;
- CassandraPartitionResult partitionResult = partitionManager.getPartitions(handle, constraint.getSummary());
+ CassandraPartitionResult partitionResult = partitionManager.getPartitions(handle, session, constraint.getSummary());
String clusteringKeyPredicates = "";
TupleDomain unenforcedConstraint;
@@ -221,7 +221,7 @@ public ConnectorTableLayoutResult getTableLayoutForConstraint(
}
else {
CassandraClusteringPredicatesExtractor clusteringPredicatesExtractor = new CassandraClusteringPredicatesExtractor(
- cassandraSession.getTable(getTableName(handle)).getClusteringKeyColumns(),
+ cassandraSession.getTable(session, getTableName(handle)).getClusteringKeyColumns(),
partitionResult.getUnenforcedConstraint(),
cassandraSession.getCassandraVersion());
clusteringKeyPredicates = clusteringPredicatesExtractor.getClusteringKeyPredicates();
@@ -347,7 +347,7 @@ public ConnectorInsertTableHandle beginInsert(ConnectorSession session, Connecto
}
SchemaTableName schemaTableName = new SchemaTableName(table.getSchemaName(), table.getTableName());
- List columns = cassandraSession.getTable(schemaTableName).getColumns();
+ List columns = cassandraSession.getTable(session, schemaTableName).getColumns();
List columnNames = columns.stream().map(CassandraColumnHandle::getName).collect(Collectors.toList());
List columnTypes = columns.stream().map(CassandraColumnHandle::getType).collect(Collectors.toList());
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPageSink.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPageSink.java
index ee51ec707a8e6..048f09a867e29 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPageSink.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPageSink.java
@@ -21,6 +21,7 @@
import com.facebook.presto.common.block.Block;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.spi.ConnectorPageSink;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.PrestoException;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
@@ -44,12 +45,14 @@
import static com.facebook.presto.cassandra.util.CassandraCqlUtils.validTableName;
import static com.facebook.presto.common.type.BigintType.BIGINT;
import static com.facebook.presto.common.type.BooleanType.BOOLEAN;
+import static com.facebook.presto.common.type.DateTimeEncoding.unpackMillisUtc;
import static com.facebook.presto.common.type.DateType.DATE;
import static com.facebook.presto.common.type.DoubleType.DOUBLE;
import static com.facebook.presto.common.type.IntegerType.INTEGER;
import static com.facebook.presto.common.type.RealType.REAL;
import static com.facebook.presto.common.type.SmallintType.SMALLINT;
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP;
+import static com.facebook.presto.common.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static com.facebook.presto.common.type.TinyintType.TINYINT;
import static com.facebook.presto.common.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.common.type.Varchars.isVarcharType;
@@ -67,6 +70,7 @@ public class CassandraPageSink
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ISO_LOCAL_DATE.withZone(ZoneId.of("UTC"));
private final CassandraSession cassandraSession;
+ private final ConnectorSession session;
private final PreparedStatement insert;
private final List columnTypes;
private final boolean generateUUID;
@@ -74,6 +78,7 @@ public class CassandraPageSink
public CassandraPageSink(
CassandraSession cassandraSession,
+ ConnectorSession connectorSession,
ProtocolVersion protocolVersion,
String schemaName,
String tableName,
@@ -82,6 +87,7 @@ public CassandraPageSink(
boolean generateUUID)
{
this.cassandraSession = requireNonNull(cassandraSession, "cassandraSession");
+ this.session = requireNonNull(connectorSession, "connectorSession is null");
requireNonNull(schemaName, "schemaName is null");
requireNonNull(tableName, "tableName is null");
requireNonNull(columnNames, "columnNames is null");
@@ -156,9 +162,12 @@ else if (REAL.equals(type)) {
else if (DATE.equals(type)) {
values.add(toCassandraDate.apply(type.getLong(block, position)));
}
- else if (TIMESTAMP.equals(type)) {
+ else if (session.getSqlFunctionProperties().isLegacyTimestamp() && TIMESTAMP.equals(type)) {
values.add(new Timestamp(type.getLong(block, position)));
}
+ else if (!session.getSqlFunctionProperties().isLegacyTimestamp() && TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
+ values.add(new Timestamp(unpackMillisUtc(type.getLong(block, position))));
+ }
else if (isVarcharType(type)) {
values.add(type.getSlice(block, position).toStringUtf8());
}
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPageSinkProvider.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPageSinkProvider.java
index 37c4d9e0710cc..c48145ba6a199 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPageSinkProvider.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPageSinkProvider.java
@@ -49,6 +49,7 @@ public ConnectorPageSink createPageSink(ConnectorTransactionHandle transactionHa
return new CassandraPageSink(
cassandraSession,
+ session,
protocolVersion,
handle.getSchemaName(),
handle.getTableName(),
@@ -67,6 +68,7 @@ public ConnectorPageSink createPageSink(ConnectorTransactionHandle transactionHa
return new CassandraPageSink(
cassandraSession,
+ session,
protocolVersion,
handle.getSchemaName(),
handle.getTableName(),
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPartitionManager.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPartitionManager.java
index 20034e925a8c1..326e18dd738bb 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPartitionManager.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraPartitionManager.java
@@ -19,6 +19,7 @@
import com.facebook.presto.common.predicate.Range;
import com.facebook.presto.common.predicate.TupleDomain;
import com.facebook.presto.spi.ColumnHandle;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
@@ -49,11 +50,11 @@ public CassandraPartitionManager(CassandraSession cassandraSession)
this.cassandraSession = requireNonNull(cassandraSession, "cassandraSession is null");
}
- public CassandraPartitionResult getPartitions(ConnectorTableHandle tableHandle, TupleDomain tupleDomain)
+ public CassandraPartitionResult getPartitions(ConnectorTableHandle tableHandle, ConnectorSession connectorSession, TupleDomain tupleDomain)
{
CassandraTableHandle cassandraTableHandle = (CassandraTableHandle) tableHandle;
- CassandraTable table = cassandraSession.getTable(cassandraTableHandle.getSchemaTableName());
+ CassandraTable table = cassandraSession.getTable(connectorSession, cassandraTableHandle.getSchemaTableName());
List partitionKeys = table.getPartitionKeyColumns();
// fetch the partitions
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordCursor.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordCursor.java
index 6b9def7e8f0aa..47f00b3dedbbc 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordCursor.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordCursor.java
@@ -16,25 +16,31 @@
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.facebook.presto.common.predicate.NullableValue;
+import com.facebook.presto.common.type.TimeZoneKey;
import com.facebook.presto.common.type.Type;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.RecordCursor;
import io.airlift.slice.Slice;
import java.util.List;
+import static com.facebook.presto.common.type.DateTimeEncoding.packDateTimeWithZone;
import static io.airlift.slice.Slices.utf8Slice;
import static java.lang.Float.floatToRawIntBits;
+import static java.util.Objects.requireNonNull;
public class CassandraRecordCursor
implements RecordCursor
{
private final List fullCassandraTypes;
+ private final ConnectorSession session;
private final ResultSet rs;
private Row currentRow;
private long count;
- public CassandraRecordCursor(CassandraSession cassandraSession, List fullCassandraTypes, String cql)
+ public CassandraRecordCursor(CassandraSession cassandraSession, ConnectorSession connectorSession, List fullCassandraTypes, String cql)
{
+ this.session = requireNonNull(connectorSession, "connectorSession is null");
this.fullCassandraTypes = fullCassandraTypes;
rs = cassandraSession.execute(cql);
currentRow = null;
@@ -104,6 +110,8 @@ public long getLong(int i)
return currentRow.getLong(i);
case TIMESTAMP:
return currentRow.getTimestamp(i).getTime();
+ case TIMESTAMP_WITH_TIMEZONE:
+ return packDateTimeWithZone(currentRow.getTimestamp(i).getTime(), TimeZoneKey.UTC_KEY);
case DATE:
return currentRow.getDate(i).getDaysSinceEpoch();
case FLOAT:
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordSet.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordSet.java
index d273f68ff3b8d..87fc440e93ca4 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordSet.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordSet.java
@@ -14,6 +14,7 @@
package com.facebook.presto.cassandra;
import com.facebook.presto.common.type.Type;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.spi.RecordSet;
import com.google.common.collect.ImmutableList;
@@ -28,13 +29,15 @@ public class CassandraRecordSet
implements RecordSet
{
private final CassandraSession cassandraSession;
+ private final ConnectorSession session;
private final String cql;
private final List cassandraTypes;
private final List columnTypes;
- public CassandraRecordSet(CassandraSession cassandraSession, String cql, List cassandraColumns)
+ public CassandraRecordSet(CassandraSession cassandraSession, ConnectorSession connectorSession, String cql, List cassandraColumns)
{
this.cassandraSession = requireNonNull(cassandraSession, "cassandraSession is null");
+ this.session = requireNonNull(connectorSession, "connectorSession is null");
this.cql = requireNonNull(cql, "cql is null");
requireNonNull(cassandraColumns, "cassandraColumns is null");
@@ -51,7 +54,7 @@ public List getColumnTypes()
@Override
public RecordCursor cursor()
{
- return new CassandraRecordCursor(cassandraSession, cassandraTypes, cql);
+ return new CassandraRecordCursor(cassandraSession, session, cassandraTypes, cql);
}
private static List transformList(List list, Function function)
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordSetProvider.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordSetProvider.java
index 93d4a519e8934..3f9afe9bab4c7 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordSetProvider.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraRecordSetProvider.java
@@ -62,7 +62,7 @@ public RecordSet getRecordSet(ConnectorTransactionHandle transaction, ConnectorS
String cql = sb.toString();
log.debug("Creating record set: %s", cql);
- return new CassandraRecordSet(cassandraSession, cql, cassandraColumns);
+ return new CassandraRecordSet(cassandraSession, session, cql, cassandraColumns);
}
@Override
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraSession.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraSession.java
index 214a0d0787b2d..7a770469cf0b5 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraSession.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraSession.java
@@ -20,6 +20,7 @@
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.TokenRange;
import com.datastax.driver.core.VersionNumber;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.SchemaNotFoundException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TableNotFoundException;
@@ -49,7 +50,7 @@ public interface CassandraSession
List getCaseSensitiveTableNames(String caseInsensitiveSchemaName)
throws SchemaNotFoundException;
- CassandraTable getTable(SchemaTableName schemaTableName)
+ CassandraTable getTable(ConnectorSession connectorSession, SchemaTableName schemaTableName)
throws TableNotFoundException;
/**
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraSplitManager.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraSplitManager.java
index e94faf013066d..944ec11acc9e3 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraSplitManager.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraSplitManager.java
@@ -78,7 +78,7 @@ public ConnectorSplitSource getSplits(
if (partitions.size() == 1) {
CassandraPartition cassandraPartition = partitions.get(0);
if (cassandraPartition.isUnpartitioned() || cassandraPartition.isIndexedColumnPredicatePushdown()) {
- CassandraTable table = cassandraSession.getTable(cassandraTableHandle.getSchemaTableName());
+ CassandraTable table = cassandraSession.getTable(session, cassandraTableHandle.getSchemaTableName());
List splits = getSplitsByTokenRange(table, cassandraPartition.getPartitionId(), getSplitsPerNode(session));
return new FixedSplitSource(splits);
}
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraType.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraType.java
index 6ddee8fd53524..1234925498ef0 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraType.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/CassandraType.java
@@ -29,10 +29,13 @@
import com.facebook.presto.common.type.IntegerType;
import com.facebook.presto.common.type.RealType;
import com.facebook.presto.common.type.SmallintType;
+import com.facebook.presto.common.type.TimeZoneKey;
import com.facebook.presto.common.type.TimestampType;
+import com.facebook.presto.common.type.TimestampWithTimeZoneType;
import com.facebook.presto.common.type.TinyintType;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.common.type.VarbinaryType;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.PrestoException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.net.InetAddresses;
@@ -47,6 +50,7 @@
import java.util.List;
import java.util.Map;
+import static com.facebook.presto.common.type.DateTimeEncoding.packDateTimeWithZone;
import static com.facebook.presto.common.type.VarcharType.createUnboundedVarcharType;
import static com.facebook.presto.common.type.VarcharType.createVarcharType;
import static com.facebook.presto.common.type.Varchars.isVarcharType;
@@ -78,6 +82,7 @@ public enum CassandraType
TEXT(createUnboundedVarcharType(), String.class),
DATE(DateType.DATE, LocalDate.class),
TIMESTAMP(TimestampType.TIMESTAMP, Date.class),
+ TIMESTAMP_WITH_TIMEZONE(TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE, Date.class),
UUID(createVarcharType(Constants.UUID_STRING_MAX_LENGTH), java.util.UUID.class),
TIMEUUID(createVarcharType(Constants.UUID_STRING_MAX_LENGTH), java.util.UUID.class),
VARCHAR(createUnboundedVarcharType(), String.class),
@@ -123,7 +128,7 @@ public int getTypeArgumentSize()
}
}
- public static CassandraType getCassandraType(DataType.Name name)
+ public static CassandraType getCassandraType(ConnectorSession connectorSession, DataType.Name name)
{
switch (name) {
case ASCII:
@@ -161,7 +166,10 @@ public static CassandraType getCassandraType(DataType.Name name)
case TEXT:
return TEXT;
case TIMESTAMP:
- return TIMESTAMP;
+ if (connectorSession.getSqlFunctionProperties().isLegacyTimestamp()) {
+ return TIMESTAMP;
+ }
+ return TIMESTAMP_WITH_TIMEZONE;
case TIMEUUID:
return TIMEUUID;
case TINYINT:
@@ -219,6 +227,8 @@ public static NullableValue getColumnValue(Row row, int position, CassandraType
return NullableValue.of(nativeType, utf8Slice(row.getUUID(position).toString()));
case TIMESTAMP:
return NullableValue.of(nativeType, row.getTimestamp(position).getTime());
+ case TIMESTAMP_WITH_TIMEZONE:
+ return NullableValue.of(nativeType, packDateTimeWithZone(row.getTimestamp(position).getTime(), TimeZoneKey.UTC_KEY));
case DATE:
return NullableValue.of(nativeType, (long) row.getDate(position).getDaysSinceEpoch());
case INET:
@@ -347,6 +357,7 @@ public static String getColumnValueForCql(Row row, int position, CassandraType c
case TIMEUUID:
return row.getUUID(position).toString();
case TIMESTAMP:
+ case TIMESTAMP_WITH_TIMEZONE:
return Long.toString(row.getTimestamp(position).getTime());
case DATE:
return row.getDate(position).toString();
@@ -373,6 +384,7 @@ private static String objectToString(Object object, CassandraType elemType)
case UUID:
case TIMEUUID:
case TIMESTAMP:
+ case TIMESTAMP_WITH_TIMEZONE:
case DATE:
case INET:
case VARINT:
@@ -447,6 +459,7 @@ public Object getJavaValue(Object nativeValue)
// Otherwise partition id doesn't match
return new BigDecimal(nativeValue.toString());
case TIMESTAMP:
+ case TIMESTAMP_WITH_TIMEZONE:
return new Date((Long) nativeValue);
case DATE:
return LocalDate.fromDaysSinceEpoch(((Long) nativeValue).intValue());
@@ -480,6 +493,7 @@ public boolean isSupportedPartitionKey()
case FLOAT:
case DECIMAL:
case TIMESTAMP:
+ case TIMESTAMP_WITH_TIMEZONE:
case UUID:
case TIMEUUID:
return true;
@@ -511,6 +525,7 @@ public Object validateClusteringKey(Object value)
case FLOAT:
case DECIMAL:
case TIMESTAMP:
+ case TIMESTAMP_WITH_TIMEZONE:
case DATE:
case UUID:
case TIMEUUID:
@@ -560,7 +575,7 @@ else if (type.equals(DateType.DATE)) {
else if (type.equals(VarbinaryType.VARBINARY)) {
return BLOB;
}
- else if (type.equals(TimestampType.TIMESTAMP)) {
+ else if (type.equals(TimestampType.TIMESTAMP) || type.equals(TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE)) {
return TIMESTAMP;
}
throw new IllegalArgumentException("unsupported type: " + type);
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/NativeCassandraSession.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/NativeCassandraSession.java
index 44c9b91b1f478..7489c3190426e 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/NativeCassandraSession.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/NativeCassandraSession.java
@@ -44,6 +44,7 @@
import com.facebook.presto.common.predicate.NullableValue;
import com.facebook.presto.common.predicate.TupleDomain;
import com.facebook.presto.spi.ColumnHandle;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaNotFoundException;
import com.facebook.presto.spi.SchemaTableName;
@@ -206,7 +207,7 @@ public List getCaseSensitiveTableNames(String caseSensitiveSchemaName)
}
@Override
- public CassandraTable getTable(SchemaTableName schemaTableName)
+ public CassandraTable getTable(ConnectorSession connectorSession, SchemaTableName schemaTableName)
throws TableNotFoundException
{
KeyspaceMetadata keyspace = getKeyspaceByCaseSensitiveName(schemaTableName.getSchemaName());
@@ -247,7 +248,7 @@ public CassandraTable getTable(SchemaTableName schemaTableName)
for (ColumnMetadata columnMeta : tableMeta.getPartitionKey()) {
primaryKeySet.add(columnMeta.getName());
boolean hidden = hiddenColumns.contains(columnMeta.getName());
- CassandraColumnHandle columnHandle = buildColumnHandle(tableMeta, columnMeta, true, false, columnNames.indexOf(columnMeta.getName()), hidden);
+ CassandraColumnHandle columnHandle = buildColumnHandle(connectorSession, tableMeta, columnMeta, true, false, columnNames.indexOf(columnMeta.getName()), hidden);
columnHandles.add(columnHandle);
}
@@ -255,7 +256,7 @@ public CassandraTable getTable(SchemaTableName schemaTableName)
for (ColumnMetadata columnMeta : tableMeta.getClusteringColumns()) {
primaryKeySet.add(columnMeta.getName());
boolean hidden = hiddenColumns.contains(columnMeta.getName());
- CassandraColumnHandle columnHandle = buildColumnHandle(tableMeta, columnMeta, false, true, columnNames.indexOf(columnMeta.getName()), hidden);
+ CassandraColumnHandle columnHandle = buildColumnHandle(connectorSession, tableMeta, columnMeta, false, true, columnNames.indexOf(columnMeta.getName()), hidden);
columnHandles.add(columnHandle);
}
@@ -263,7 +264,7 @@ public CassandraTable getTable(SchemaTableName schemaTableName)
for (ColumnMetadata columnMeta : columns) {
if (!primaryKeySet.contains(columnMeta.getName())) {
boolean hidden = hiddenColumns.contains(columnMeta.getName());
- CassandraColumnHandle columnHandle = buildColumnHandle(tableMeta, columnMeta, false, false, columnNames.indexOf(columnMeta.getName()), hidden);
+ CassandraColumnHandle columnHandle = buildColumnHandle(connectorSession, tableMeta, columnMeta, false, false, columnNames.indexOf(columnMeta.getName()), hidden);
columnHandles.add(columnHandle);
}
}
@@ -376,18 +377,18 @@ private static void checkColumnNames(List columns)
}
}
- private CassandraColumnHandle buildColumnHandle(AbstractTableMetadata tableMetadata, ColumnMetadata columnMeta, boolean partitionKey, boolean clusteringKey, int ordinalPosition, boolean hidden)
+ private CassandraColumnHandle buildColumnHandle(ConnectorSession connectorSession, AbstractTableMetadata tableMetadata, ColumnMetadata columnMeta, boolean partitionKey, boolean clusteringKey, int ordinalPosition, boolean hidden)
{
- CassandraType cassandraType = CassandraType.getCassandraType(columnMeta.getType().getName());
+ CassandraType cassandraType = CassandraType.getCassandraType(connectorSession, columnMeta.getType().getName());
List typeArguments = null;
if (cassandraType.getTypeArgumentSize() > 0) {
List typeArgs = columnMeta.getType().getTypeArguments();
switch (cassandraType.getTypeArgumentSize()) {
case 1:
- typeArguments = ImmutableList.of(CassandraType.getCassandraType(typeArgs.get(0).getName()));
+ typeArguments = ImmutableList.of(CassandraType.getCassandraType(connectorSession, typeArgs.get(0).getName()));
break;
case 2:
- typeArguments = ImmutableList.of(CassandraType.getCassandraType(typeArgs.get(0).getName()), CassandraType.getCassandraType(typeArgs.get(1).getName()));
+ typeArguments = ImmutableList.of(CassandraType.getCassandraType(connectorSession, typeArgs.get(0).getName()), CassandraType.getCassandraType(connectorSession, typeArgs.get(1).getName()));
break;
default:
throw new IllegalArgumentException("Invalid type arguments: " + typeArgs);
diff --git a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/util/CassandraCqlUtils.java b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/util/CassandraCqlUtils.java
index 4aa9631d48b27..72841826c9fd1 100644
--- a/presto-cassandra/src/main/java/com/facebook/presto/cassandra/util/CassandraCqlUtils.java
+++ b/presto-cassandra/src/main/java/com/facebook/presto/cassandra/util/CassandraCqlUtils.java
@@ -28,6 +28,8 @@
import java.util.List;
import java.util.Set;
+import static com.facebook.presto.common.type.DateTimeEncoding.unpackMillisUtc;
+
public final class CassandraCqlUtils
{
private CassandraCqlUtils()
@@ -153,6 +155,9 @@ public static Select selectCountAllFrom(CassandraTableHandle tableHandle)
public static String cqlValue(String value, CassandraType cassandraType)
{
switch (cassandraType) {
+ case TIMESTAMP_WITH_TIMEZONE:
+ long millis = Long.parseLong(value);
+ return String.valueOf(unpackMillisUtc(millis));
case ASCII:
case TEXT:
case VARCHAR:
@@ -167,6 +172,9 @@ public static String cqlValue(String value, CassandraType cassandraType)
public static String toCQLCompatibleString(Object value)
{
+ if (value instanceof Long) {
+ return value.toString();
+ }
if (value instanceof Slice) {
return ((Slice) value).toStringUtf8();
}
diff --git a/presto-cassandra/src/test/java/com/facebook/presto/cassandra/TestCassandraConnector.java b/presto-cassandra/src/test/java/com/facebook/presto/cassandra/TestCassandraConnector.java
index a3535c0e46b4f..913e848f4137c 100644
--- a/presto-cassandra/src/test/java/com/facebook/presto/cassandra/TestCassandraConnector.java
+++ b/presto-cassandra/src/test/java/com/facebook/presto/cassandra/TestCassandraConnector.java
@@ -58,11 +58,13 @@
import static com.facebook.presto.cassandra.CassandraTestingUtils.createTestTables;
import static com.facebook.presto.common.type.BigintType.BIGINT;
import static com.facebook.presto.common.type.BooleanType.BOOLEAN;
+import static com.facebook.presto.common.type.DateTimeEncoding.packDateTimeWithZone;
import static com.facebook.presto.common.type.DoubleType.DOUBLE;
import static com.facebook.presto.common.type.IntegerType.INTEGER;
import static com.facebook.presto.common.type.RealType.REAL;
import static com.facebook.presto.common.type.TimeZoneKey.UTC_KEY;
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP;
+import static com.facebook.presto.common.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static com.facebook.presto.common.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.common.type.Varchars.isVarcharType;
import static com.facebook.presto.spi.connector.ConnectorSplitManager.SplitSchedulingStrategy.UNGROUPED_SCHEDULING;
@@ -91,7 +93,7 @@ public class TestCassandraConnector
System.currentTimeMillis(),
new CassandraSessionProperties(new CassandraClientConfig()).getSessionProperties(),
ImmutableMap.of(),
- true,
+ false,
Optional.empty(),
ImmutableSet.of(),
Optional.empty(),
@@ -231,7 +233,7 @@ public void testGetRecords()
assertEquals(cursor.getSlice(columnIndex.get("typeuuid")).toStringUtf8(), String.format("00000000-0000-0000-0000-%012d", rowId));
- assertEquals(cursor.getSlice(columnIndex.get("typetimestamp")).toStringUtf8(), Long.valueOf(DATE.getTime()).toString());
+ assertEquals(cursor.getLong(columnIndex.get("typetimestamp")), packDateTimeWithZone(DATE.getTime(), UTC_KEY));
long newCompletedBytes = cursor.getCompletedBytes();
assertTrue(newCompletedBytes >= completedBytes);
@@ -293,6 +295,9 @@ else if (BIGINT.equals(type)) {
else if (TIMESTAMP.equals(type)) {
cursor.getLong(columnIndex);
}
+ else if (TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
+ cursor.getLong(columnIndex);
+ }
else if (DOUBLE.equals(type)) {
cursor.getDouble(columnIndex);
}
diff --git a/presto-cassandra/src/test/java/com/facebook/presto/cassandra/TestCassandraIntegrationSmokeTest.java b/presto-cassandra/src/test/java/com/facebook/presto/cassandra/TestCassandraIntegrationSmokeTest.java
index c7f9a7ecdb510..d177126644e0e 100644
--- a/presto-cassandra/src/test/java/com/facebook/presto/cassandra/TestCassandraIntegrationSmokeTest.java
+++ b/presto-cassandra/src/test/java/com/facebook/presto/cassandra/TestCassandraIntegrationSmokeTest.java
@@ -29,6 +29,8 @@
import java.nio.ByteBuffer;
import java.sql.Timestamp;
import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
import java.util.List;
import static com.datastax.driver.core.utils.Bytes.toRawHexString;
@@ -47,7 +49,7 @@
import static com.facebook.presto.common.type.DoubleType.DOUBLE;
import static com.facebook.presto.common.type.IntegerType.INTEGER;
import static com.facebook.presto.common.type.RealType.REAL;
-import static com.facebook.presto.common.type.TimestampType.TIMESTAMP;
+import static com.facebook.presto.common.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static com.facebook.presto.common.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.common.type.VarcharType.createUnboundedVarcharType;
import static com.facebook.presto.common.type.VarcharType.createVarcharType;
@@ -66,6 +68,7 @@ public class TestCassandraIntegrationSmokeTest
private static final String KEYSPACE = "smoke_test";
private static final Session SESSION = createCassandraSession(KEYSPACE);
+ private static final ZonedDateTime TIMESTAMP_VALUE = ZonedDateTime.of(1970, 1, 1, 3, 4, 5, 0, ZoneId.of("UTC"));
private static final Timestamp DATE_TIME_LOCAL = Timestamp.valueOf(LocalDateTime.of(1970, 1, 1, 3, 4, 5, 0));
private static final LocalDateTime TIMESTAMP_LOCAL = LocalDateTime.of(1969, 12, 31, 23, 4, 5); // TODO #7122 should match DATE_TIME_LOCAL
@@ -97,7 +100,7 @@ protected QueryRunner createQueryRunner()
CassandraServer server = new CassandraServer();
this.server = server;
this.session = server.getSession();
- createTestTables(session, server.getMetadata(), KEYSPACE, DATE_TIME_LOCAL);
+ createTestTables(session, server.getMetadata(), KEYSPACE, Timestamp.from(TIMESTAMP_VALUE.toInstant()));
return createCassandraQueryRunner(server, ImmutableMap.of());
}
@@ -111,7 +114,7 @@ public void testPartitionKeyPredicate()
" AND typeinteger = 7" +
" AND typelong = 1007" +
" AND typebytes = from_hex('" + toRawHexString(ByteBuffer.wrap(toByteArray(7))) + "')" +
- " AND typetimestamp = TIMESTAMP '1969-12-31 23:04:05'" +
+ " AND typetimestamp = TIMESTAMP '1970-01-01 03:04:05Z'" +
" AND typeansi = 'ansi 7'" +
" AND typeboolean = false" +
" AND typedecimal = 128.0" +
@@ -246,17 +249,17 @@ public void testClusteringKeyPushdownInequality()
assertEquals(execute(sql).getRowCount(), 4);
sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two=2";
assertEquals(execute(sql).getRowCount(), 1);
- sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two=2 AND clust_three = timestamp '1969-12-31 23:04:05.020'";
+ sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two=2 AND clust_three = timestamp '1970-01-01 03:04:05.020Z'";
assertEquals(execute(sql).getRowCount(), 1);
- sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two=2 AND clust_three = timestamp '1969-12-31 23:04:05.010'";
+ sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two=2 AND clust_three = timestamp '1970-01-01 03:04:05.010Z'";
assertEquals(execute(sql).getRowCount(), 0);
sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two IN (1,2)";
assertEquals(execute(sql).getRowCount(), 2);
sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two > 1 AND clust_two < 3";
assertEquals(execute(sql).getRowCount(), 1);
- sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two=2 AND clust_three >= timestamp '1969-12-31 23:04:05.010' AND clust_three <= timestamp '1969-12-31 23:04:05.020'";
+ sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two=2 AND clust_three >= timestamp '1970-01-01 03:04:05.010Z' AND clust_three <= timestamp '1970-01-01 03:04:05.020Z'";
assertEquals(execute(sql).getRowCount(), 1);
- sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two IN (1,2) AND clust_three >= timestamp '1969-12-31 23:04:05.010' AND clust_three <= timestamp '1969-12-31 23:04:05.020'";
+ sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two IN (1,2) AND clust_three >= timestamp '1970-01-01 03:04:05.010Z' AND clust_three <= timestamp '1970-01-01 03:04:05.020Z'";
assertEquals(execute(sql).getRowCount(), 2);
sql = "SELECT * FROM " + TABLE_CLUSTERING_KEYS_INEQUALITY + " WHERE key='key_1' AND clust_one='clust_one' AND clust_two IN (1,2,3) AND clust_two < 2";
assertEquals(execute(sql).getRowCount(), 1);
@@ -464,7 +467,7 @@ public void testInsert()
"1, " +
"1000, " +
"null, " +
- "timestamp '1970-01-01 08:34:05.0', " +
+ "timestamp '1970-01-01 08:34:05.0Z', " +
"'ansi1', " +
"true, " +
"null, " +
@@ -488,7 +491,7 @@ public void testInsert()
1,
1000L,
null,
- LocalDateTime.of(1970, 1, 1, 8, 34, 5),
+ ZonedDateTime.of(1970, 1, 1, 8, 34, 5, 0, ZoneId.of("UTC")),
"ansi1",
true,
null,
@@ -568,7 +571,7 @@ private void assertSelect(String tableName, boolean createdByPresto)
INTEGER,
BIGINT,
VARBINARY,
- TIMESTAMP,
+ TIMESTAMP_WITH_TIME_ZONE,
createUnboundedVarcharType(),
BOOLEAN,
DOUBLE,
@@ -593,7 +596,7 @@ private void assertSelect(String tableName, boolean createdByPresto)
rowNumber,
rowNumber + 1000L,
ByteBuffer.wrap(toByteArray(rowNumber)),
- TIMESTAMP_LOCAL,
+ TIMESTAMP_VALUE,
"ansi " + rowNumber,
rowNumber % 2 == 0,
Math.pow(2, rowNumber),
diff --git a/presto-delta/pom.xml b/presto-delta/pom.xml
index 1f50b6f6a437a..031274997f9ad 100644
--- a/presto-delta/pom.xml
+++ b/presto-delta/pom.xml
@@ -141,6 +141,11 @@
jackson-databind
+
+ joda-time
+ joda-time
+
+
com.facebook.presto
presto-spi
diff --git a/presto-delta/src/main/java/com/facebook/presto/delta/DeltaPageSourceProvider.java b/presto-delta/src/main/java/com/facebook/presto/delta/DeltaPageSourceProvider.java
index a47b6de549a88..69ac165fde600 100644
--- a/presto-delta/src/main/java/com/facebook/presto/delta/DeltaPageSourceProvider.java
+++ b/presto-delta/src/main/java/com/facebook/presto/delta/DeltaPageSourceProvider.java
@@ -64,6 +64,7 @@
import org.apache.parquet.io.MessageColumnIO;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.MessageType;
+import org.joda.time.DateTimeZone;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -200,7 +201,7 @@ private Map convertPartitionValues(List allCol
}));
}
- private static ConnectorPageSource createParquetPageSource(
+ private ConnectorPageSource createParquetPageSource(
HdfsEnvironment hdfsEnvironment,
ConnectorSession session,
Configuration configuration,
@@ -279,6 +280,9 @@ private static ConnectorPageSource createParquetPageSource(
}
}
MessageColumnIO messageColumnIO = getColumnIO(fileSchema, requestedSchema);
+
+ Optional timezone = Optional.ofNullable(fileMetaData.getKeyValueMetaData().get("writer.time.zone")).map(DateTimeZone::forID);
+
ParquetReader parquetReader = new ParquetReader(
messageColumnIO,
blocks.build(),
@@ -291,7 +295,8 @@ private static ConnectorPageSource createParquetPageSource(
parquetPredicate,
blockIndexStores,
false,
- fileDecryptor);
+ fileDecryptor,
+ timezone);
ImmutableList.Builder namesBuilder = ImmutableList.builder();
ImmutableList.Builder typesBuilder = ImmutableList.builder();
diff --git a/presto-druid/src/test/java/com/facebook/presto/druid/TestDruidQueryGenerator.java b/presto-druid/src/test/java/com/facebook/presto/druid/TestDruidQueryGenerator.java
index 4d05f9cad8959..1fbf70cd7dbae 100644
--- a/presto-druid/src/test/java/com/facebook/presto/druid/TestDruidQueryGenerator.java
+++ b/presto-druid/src/test/java/com/facebook/presto/druid/TestDruidQueryGenerator.java
@@ -176,9 +176,6 @@ public void testDistinctCountGroupByPushdown()
@Test
public void testTimestampLiteralPushdown()
{
- //the timezone of the session is Pacific/Apia UTC+13
- //the timezone of the connector session is UTC
- //so the time needs to be adjust for 13 hours if the timezone not specified
testDQL(
planBuilder -> project(
planBuilder,
@@ -187,7 +184,7 @@ public void testTimestampLiteralPushdown()
tableScan(planBuilder, druidTable, regionId, city, fare, datetime),
getRowExpression("datetime = timestamp '2016-06-26 19:00:00.000'", defaultSessionHolder)),
ImmutableList.of("city", "datetime")),
- "SELECT \"city\", \"datetime\" FROM \"realtimeOnly\" WHERE (\"datetime\" = TIMESTAMP '2016-06-26 06:00:00.000')");
+ "SELECT \"city\", \"datetime\" FROM \"realtimeOnly\" WHERE (\"datetime\" = TIMESTAMP '2016-06-26 19:00:00.000')");
//test timestamp with timezone
testDQL(
planBuilder -> project(
diff --git a/presto-elasticsearch/src/main/java/com/facebook/presto/elasticsearch/ElasticsearchQueryBuilder.java b/presto-elasticsearch/src/main/java/com/facebook/presto/elasticsearch/ElasticsearchQueryBuilder.java
index 4548fe3b7ea0d..117d609e5a1ed 100644
--- a/presto-elasticsearch/src/main/java/com/facebook/presto/elasticsearch/ElasticsearchQueryBuilder.java
+++ b/presto-elasticsearch/src/main/java/com/facebook/presto/elasticsearch/ElasticsearchQueryBuilder.java
@@ -29,6 +29,7 @@
import java.time.Instant;
import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
@@ -156,10 +157,10 @@ private static Object getValue(ConnectorSession session, Type type, Object value
}
if (type.equals(TIMESTAMP)) {
- checkState(session.getSqlFunctionProperties().isLegacyTimestamp(), "New timestamp semantics not yet supported");
-
return Instant.ofEpochMilli((Long) value)
- .atZone(ZoneId.of(session.getSqlFunctionProperties().getTimeZoneKey().getId()))
+ .atZone(session.getSqlFunctionProperties().isLegacyTimestamp() ?
+ ZoneId.of(session.getSqlFunctionProperties().getTimeZoneKey().getId()) :
+ ZoneOffset.UTC)
.toLocalDateTime()
.format(ISO_DATE_TIME);
}
diff --git a/presto-elasticsearch/src/main/java/com/facebook/presto/elasticsearch/decoders/TimestampDecoder.java b/presto-elasticsearch/src/main/java/com/facebook/presto/elasticsearch/decoders/TimestampDecoder.java
index 0102c0d094fbe..37a5afce97a67 100644
--- a/presto-elasticsearch/src/main/java/com/facebook/presto/elasticsearch/decoders/TimestampDecoder.java
+++ b/presto-elasticsearch/src/main/java/com/facebook/presto/elasticsearch/decoders/TimestampDecoder.java
@@ -22,6 +22,7 @@
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
+import java.time.ZoneOffset;
import java.util.function.Supplier;
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP;
@@ -42,7 +43,9 @@ public class TimestampDecoder
public TimestampDecoder(ConnectorSession session, String path)
{
this.path = requireNonNull(path, "path is null");
- this.zoneId = ZoneId.of(session.getSqlFunctionProperties().getTimeZoneKey().getId());
+ this.zoneId = session.getSqlFunctionProperties().isLegacyTimestamp() ?
+ ZoneId.of(session.getSqlFunctionProperties().getTimeZoneKey().getId()) :
+ ZoneOffset.UTC;
}
@Override
diff --git a/presto-hive-function-namespace/pom.xml b/presto-hive-function-namespace/pom.xml
index 8118f490721cb..6b82fe5107d40 100644
--- a/presto-hive-function-namespace/pom.xml
+++ b/presto-hive-function-namespace/pom.xml
@@ -43,13 +43,6 @@
hadoop-apache
-
-
-
- org.apache.hive
- hive-llap-common
-
-
com.fasterxml.jackson.core
jackson-annotations
diff --git a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/FunctionRegistry.java b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/FunctionRegistry.java
index ad697a5f1cd7a..a312d8d4d87c9 100644
--- a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/FunctionRegistry.java
+++ b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/FunctionRegistry.java
@@ -64,6 +64,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCoalesce;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcatWS;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentDatabase;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentDate;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentGroups;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp;
@@ -114,7 +115,6 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNamedStruct;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNextDay;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNullif;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFNvl;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPFalse;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotFalse;
@@ -165,7 +165,6 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWidthBucket;
-import org.apache.hadoop.hive.ql.udf.generic.UDFCurrentDB;
import org.apache.hadoop.hive.ql.udf.xml.GenericUDFXPath;
import java.util.Set;
@@ -221,7 +220,7 @@ private FunctionRegistry() {}
system.registerGenericUDF("likeall", GenericUDFLikeAll.class);
system.registerGenericUDF("rlike", GenericUDFRegExp.class);
system.registerGenericUDF("regexp", GenericUDFRegExp.class);
- system.registerGenericUDF("nvl", GenericUDFNvl.class);
+ system.registerGenericUDF("nvl", GenericUDFCoalesce.class); //HIVE-20961
system.registerGenericUDF("split", GenericUDFSplit.class);
system.registerGenericUDF("str_to_map", GenericUDFStringToMap.class);
system.registerGenericUDF("translate", GenericUDFTranslate.class);
@@ -240,7 +239,7 @@ private FunctionRegistry() {}
system.registerGenericUDF("months_between", GenericUDFMonthsBetween.class);
system.registerGenericUDF("xpath", GenericUDFXPath.class);
system.registerGenericUDF("grouping", GenericUDFGrouping.class);
- system.registerGenericUDF("current_database", UDFCurrentDB.class);
+ system.registerGenericUDF("current_database", GenericUDFCurrentDatabase.class);
system.registerGenericUDF("current_date", GenericUDFCurrentDate.class);
system.registerGenericUDF("current_timestamp", GenericUDFCurrentTimestamp.class);
system.registerGenericUDF("current_user", GenericUDFCurrentUser.class);
diff --git a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/BlockInputDecoders.java b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/BlockInputDecoders.java
index 5c5f43dc8ee64..52cea0f3e1042 100644
--- a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/BlockInputDecoders.java
+++ b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/BlockInputDecoders.java
@@ -22,12 +22,14 @@
import com.facebook.presto.common.type.RowType;
import com.facebook.presto.common.type.Type;
import com.google.common.collect.Streams;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -62,8 +64,6 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -166,10 +166,10 @@ else if (inspector instanceof JavaHiveDecimalObjectInspector) {
return (b, i) -> b.isNull(i) ? null : readHiveDecimal(((DecimalType) type), b, i);
}
else if (inspector instanceof JavaDateObjectInspector) {
- return (b, i) -> b.isNull(i) ? null : new Date(TimeUnit.DAYS.toMillis(type.getLong(b, i)));
+ return (b, i) -> b.isNull(i) ? null : Date.ofEpochMilli(TimeUnit.DAYS.toMillis(type.getLong(b, i)));
}
else if (inspector instanceof JavaTimestampObjectInspector) {
- return (b, i) -> b.isNull(i) ? null : new Timestamp(type.getLong(b, i));
+ return (b, i) -> b.isNull(i) ? null : Timestamp.ofEpochMilli(type.getLong(b, i));
}
else if (inspector instanceof HiveDecimalObjectInspector) {
checkArgument(type instanceof DecimalType);
@@ -184,13 +184,13 @@ else if (inspector instanceof BinaryObjectInspector) {
}
else if (inspector instanceof DateObjectInspector) {
return preferWritable ?
- (b, i) -> b.isNull(i) ? null : new DateWritable(((int) type.getLong(b, i))) :
+ (b, i) -> b.isNull(i) ? null : new DateWritableV2(((int) type.getLong(b, i))) :
(b, i) -> b.isNull(i) ? null : createDate(((int) type.getLong(b, i)));
}
else if (inspector instanceof TimestampObjectInspector) {
return preferWritable ?
- (b, i) -> b.isNull(i) ? null : new TimestampWritable(new Timestamp(type.getLong(b, i))) :
- (b, i) -> b.isNull(i) ? null : new Timestamp(type.getLong(b, i));
+ (b, i) -> b.isNull(i) ? null : new TimestampWritableV2(Timestamp.ofEpochMilli(type.getLong(b, i))) :
+ (b, i) -> b.isNull(i) ? null : Timestamp.ofEpochMilli(type.getLong(b, i));
}
else if (inspector instanceof VoidObjectInspector) {
return (b, i) -> null;
diff --git a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/DateTimeUtils.java b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/DateTimeUtils.java
index 25160274ee779..ba282ff0cb6ff 100644
--- a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/DateTimeUtils.java
+++ b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/DateTimeUtils.java
@@ -14,10 +14,8 @@
package com.facebook.presto.hive.functions.type;
-import java.sql.Date;
-import java.time.Instant;
-import java.time.OffsetDateTime;
-import java.time.ZoneId;
+import org.apache.hadoop.hive.common.type.Date;
+
import java.util.concurrent.TimeUnit;
public final class DateTimeUtils
@@ -27,9 +25,6 @@ private DateTimeUtils() {}
public static Date createDate(Object days)
{
long millis = TimeUnit.DAYS.toMillis(((long) days));
- Instant instant = Instant.ofEpochMilli((millis));
- OffsetDateTime dt = OffsetDateTime.ofInstant(instant, ZoneId.of("UTC"));
- // A trick to prevent including zone info
- return new Date(dt.getYear() - 1900, dt.getMonthValue() - 1, dt.getDayOfMonth());
+ return Date.ofEpochMilli(millis);
}
}
diff --git a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/ObjectEncoders.java b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/ObjectEncoders.java
index b9be6e3418fa3..987c32c589bbd 100644
--- a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/ObjectEncoders.java
+++ b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/ObjectEncoders.java
@@ -30,6 +30,8 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Streams;
import io.airlift.slice.Slices;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -43,8 +45,6 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@@ -99,7 +99,7 @@ public static ObjectEncoder createEncoder(Type type, ObjectInspector inspector)
return compose(primitive(inspector), o -> ((Boolean) o));
case DATE:
checkArgument(inspector instanceof PrimitiveObjectInspector);
- return compose(primitive(inspector), o -> ((Date) o).getTime());
+ return compose(primitive(inspector), o -> ((Date) o).toEpochMilli());
case DECIMAL:
if (Decimals.isShortDecimal(type)) {
DecimalType decimalType = (DecimalType) type;
@@ -118,7 +118,7 @@ else if (Decimals.isLongDecimal(type)) {
return compose(primitive(inspector), o -> (Double) o);
case TIMESTAMP:
checkArgument(inspector instanceof PrimitiveObjectInspector);
- return compose(primitive(inspector), o -> ((Timestamp) o).getTime());
+ return compose(primitive(inspector), o -> ((Timestamp) o).toEpochMilli());
case VARBINARY:
if (inspector instanceof BinaryObjectInspector) {
return compose(primitive(inspector), o -> Slices.wrappedBuffer(((byte[]) o)));
diff --git a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/ObjectInputDecoders.java b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/ObjectInputDecoders.java
index 00084c0b661f5..78970a3460d29 100644
--- a/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/ObjectInputDecoders.java
+++ b/presto-hive-function-namespace/src/main/java/com/facebook/presto/hive/functions/type/ObjectInputDecoders.java
@@ -25,9 +25,9 @@
import com.facebook.presto.common.type.UnknownType;
import io.airlift.slice.Slice;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import java.math.BigInteger;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -91,7 +91,7 @@ else if (Decimals.isLongDecimal(type)) {
case DOUBLE:
return o -> ((Double) o);
case TIMESTAMP:
- return o -> new Timestamp(((long) o));
+ return o -> Timestamp.ofEpochMilli(((long) o));
case VARBINARY:
return o -> ((Slice) o).getBytes();
case VARCHAR:
diff --git a/presto-hive-function-namespace/src/test/java/com/facebook/presto/hive/functions/type/TestObjectEncoders.java b/presto-hive-function-namespace/src/test/java/com/facebook/presto/hive/functions/type/TestObjectEncoders.java
index 95a6b02dac786..2fd4567e09e1a 100644
--- a/presto-hive-function-namespace/src/test/java/com/facebook/presto/hive/functions/type/TestObjectEncoders.java
+++ b/presto-hive-function-namespace/src/test/java/com/facebook/presto/hive/functions/type/TestObjectEncoders.java
@@ -22,7 +22,7 @@
import com.facebook.presto.server.testing.TestingPrestoServer;
import com.google.inject.Key;
import io.airlift.slice.Slice;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -110,7 +110,7 @@ public void testPrimitiveObjectEncoders()
inspector = writableDateObjectInspector;
encoder = createEncoder(DATE, inspector);
- assertTrue(encoder.encode(new DateWritable(DateTimeUtils.createDate(18380L))) instanceof Long);
+ assertTrue(encoder.encode(new DateWritableV2(DateTimeUtils.createDate(18380L))) instanceof Long);
inspector = writableHiveDecimalObjectInspector;
encoder = createEncoder(createDecimalType(11, 10), inspector);
diff --git a/presto-hive-function-namespace/src/test/java/com/facebook/presto/hive/functions/type/TestObjectInputDecoders.java b/presto-hive-function-namespace/src/test/java/com/facebook/presto/hive/functions/type/TestObjectInputDecoders.java
index f136e87762676..6b28dda633340 100644
--- a/presto-hive-function-namespace/src/test/java/com/facebook/presto/hive/functions/type/TestObjectInputDecoders.java
+++ b/presto-hive-function-namespace/src/test/java/com/facebook/presto/hive/functions/type/TestObjectInputDecoders.java
@@ -22,11 +22,11 @@
import com.facebook.presto.server.testing.TestingPrestoServer;
import com.google.inject.Key;
import io.airlift.slice.Slices;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
-import java.sql.Date;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Optional;
@@ -66,9 +66,9 @@ public void setup()
public void testToDate()
{
Date date = DateTimeUtils.createDate(18380L);
- assertEquals(date.getYear(), 2020 - 1900);
- assertEquals(date.getMonth(), 4 - 1);
- assertEquals(date.getDate(), 28);
+ assertEquals(date.getYear(), 2020);
+ assertEquals(date.getMonth(), 4);
+ assertEquals(date.getDay(), 28);
}
@Test
diff --git a/presto-hive-hadoop2/src/test/java/com/facebook/presto/hive/TestHiveClient.java b/presto-hive-hadoop2/src/test/java/com/facebook/presto/hive/TestHiveClient.java
index 0543298e49dbb..85ad09c5de7bc 100644
--- a/presto-hive-hadoop2/src/test/java/com/facebook/presto/hive/TestHiveClient.java
+++ b/presto-hive-hadoop2/src/test/java/com/facebook/presto/hive/TestHiveClient.java
@@ -22,7 +22,6 @@
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
public class TestHiveClient
extends AbstractTestHiveClient
@@ -48,9 +47,10 @@ public void initialize(String host, int port, String databaseName, int hiveVersi
NetUtils.addStaticResolution("hadoop-master", hadoopMasterIp);
}
- setup(host, port, databaseName, timeZone);
-
checkArgument(hiveVersionMajor > 0, "Invalid hiveVersionMajor: %s", hiveVersionMajor);
+
+ setup(host, port, databaseName, hiveVersionMajor >= 3 ? "UTC" : timeZone);
+
this.hiveVersionMajor = hiveVersionMajor;
}
@@ -70,34 +70,6 @@ public void testGetPartitionSplitsTableOfflinePartition()
super.testGetPartitionSplitsTableOfflinePartition();
}
- @Override
- public void testTypesRcBinary()
- throws Exception
- {
- if (getHiveVersionMajor() >= 3) {
- // TODO (https://github.com/prestosql/presto/issues/1218) requires https://issues.apache.org/jira/browse/HIVE-22167
- assertThatThrownBy(super::testTypesRcBinary)
- .isInstanceOf(AssertionError.class)
- .hasMessage("expected [2011-05-06 01:23:09.123] but found [2011-05-06 07:08:09.123]");
- return;
- }
- super.testTypesRcBinary();
- }
-
- @Override
- public void testTypesParquet()
- throws Exception
- {
- if (getHiveVersionMajor() >= 3) {
- // TODO (https://github.com/prestosql/presto/issues/1218) requires https://issues.apache.org/jira/browse/HIVE-21002
- assertThatThrownBy(super::testTypesParquet)
- .isInstanceOf(AssertionError.class)
- .hasMessage("expected [2011-05-06 01:23:09.123] but found [2011-05-06 07:08:09.123]");
- return;
- }
- super.testTypesParquet();
- }
-
@Override
public void testMismatchSchemaTable()
throws Exception
diff --git a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/MetastoreUtil.java b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/MetastoreUtil.java
index 9608947a9f037..4326cfef92fd6 100644
--- a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/MetastoreUtil.java
+++ b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/MetastoreUtil.java
@@ -74,7 +74,9 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.metastore.ProtectMode;
import org.apache.hadoop.io.Text;
import org.joda.time.DateTimeZone;
@@ -83,8 +85,6 @@
import java.io.IOException;
import java.math.BigInteger;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -155,7 +155,6 @@
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS;
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES;
-import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_DDL;
import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_LIB;
import static org.joda.time.DateTimeZone.UTC;
@@ -310,8 +309,6 @@ public static Properties getHiveSchema(
schema.setProperty(META_TABLE_COLUMN_TYPES, columnTypes);
schema.setProperty("columns.comments", columnCommentBuilder.toString());
- schema.setProperty(SERIALIZATION_DDL, toThriftDdl(tableName, partitionDataColumns));
-
String partString = "";
String partStringSep = "";
String partTypesString = "";
@@ -679,11 +676,11 @@ public static Object getField(Type type, Block block, int position)
}
if (DateType.DATE.equals(type)) {
long days = type.getLong(block, position);
- return new Date(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), TimeUnit.DAYS.toMillis(days)));
+ return Date.ofEpochMilli(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), TimeUnit.DAYS.toMillis(days)));
}
if (TimestampType.TIMESTAMP.equals(type)) {
long millisUtc = type.getLong(block, position);
- return new Timestamp(millisUtc);
+ return Timestamp.ofEpochMilli(millisUtc);
}
if (type instanceof DecimalType) {
DecimalType decimalType = (DecimalType) type;
diff --git a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/Statistics.java b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/Statistics.java
index edd52ff2f7fc9..090a1739158d9 100644
--- a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/Statistics.java
+++ b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/Statistics.java
@@ -411,7 +411,7 @@ else if (type.equals(DATE)) {
result.setDateStatistics(new DateStatistics(getDateValue(session, type, min), getDateValue(session, type, max)));
}
else if (type.equals(TIMESTAMP)) {
- result.setIntegerStatistics(new IntegerStatistics(getTimestampValue(timeZone, min), getTimestampValue(timeZone, max)));
+ result.setIntegerStatistics(new IntegerStatistics(getTimestampValue(session, timeZone, min), getTimestampValue(session, timeZone, max)));
}
else if (type instanceof DecimalType) {
result.setDecimalStatistics(new DecimalStatistics(getDecimalValue(session, type, min), getDecimalValue(session, type, max)));
@@ -437,10 +437,13 @@ private static Optional getDateValue(ConnectorSession session, Type t
return block.isNull(0) ? Optional.empty() : Optional.of(LocalDate.ofEpochDay(((SqlDate) type.getObjectValue(session.getSqlFunctionProperties(), block, 0)).getDays()));
}
- private static OptionalLong getTimestampValue(DateTimeZone timeZone, Block block)
+ private static OptionalLong getTimestampValue(ConnectorSession session, DateTimeZone timeZone, Block block)
{
- // TODO #7122
- return block.isNull(0) ? OptionalLong.empty() : OptionalLong.of(MILLISECONDS.toSeconds(timeZone.convertUTCToLocal(block.getLong(0))));
+ return block.isNull(0) ?
+ OptionalLong.empty() :
+ session.getSqlFunctionProperties().isLegacyTimestamp() ?
+ OptionalLong.of(MILLISECONDS.toSeconds(timeZone.convertUTCToLocal(block.getLong(0)))) :
+ OptionalLong.of(MILLISECONDS.toSeconds(block.getLong(0)));
}
private static Optional getDecimalValue(ConnectorSession session, Type type, Block block)
diff --git a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/ThriftHiveMetastore.java b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/ThriftHiveMetastore.java
index c3bb696979e17..6d77cbd0c60a5 100644
--- a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/ThriftHiveMetastore.java
+++ b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/ThriftHiveMetastore.java
@@ -1879,7 +1879,8 @@ private PrivilegeBag buildPrivilegeBag(
new HiveObjectRef(TABLE, databaseName, tableName, null, null),
grantee.getName(),
fromPrestoPrincipalType(grantee.getType()),
- privilegeGrantInfo));
+ privilegeGrantInfo,
+ null)); // TODO: Add support for authorizers in Presto
}
return new PrivilegeBag(privilegeBagBuilder.build());
}
diff --git a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/ThriftHiveMetastoreClient.java b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/ThriftHiveMetastoreClient.java
index 93ed690cc87a6..8efd7eb12298c 100644
--- a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/ThriftHiveMetastoreClient.java
+++ b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/ThriftHiveMetastoreClient.java
@@ -27,6 +27,7 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest;
import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse;
+import org.apache.hadoop.hive.metastore.api.GetTableRequest;
import org.apache.hadoop.hive.metastore.api.GrantRevokeRoleRequest;
import org.apache.hadoop.hive.metastore.api.GrantRevokeRoleResponse;
import org.apache.hadoop.hive.metastore.api.GrantRevokeType;
@@ -211,7 +212,11 @@ public void alterTableWithEnvironmentContext(String databaseName, String tableNa
public Table getTable(String databaseName, String tableName)
throws TException
{
- return client.get_table(constructSchemaName(catalogName, databaseName), tableName);
+ GetTableRequest getTableRequest = new GetTableRequest(databaseName, tableName);
+ if (catalogName.isPresent()) {
+ getTableRequest.setCatName(catalogName.get());
+ }
+ return client.get_table_req(getTableRequest).getTable();
}
@Override
@@ -248,7 +253,8 @@ public void setTableColumnStatistics(String databaseName, String tableName, List
public void deleteTableColumnStatistics(String databaseName, String tableName, String columnName)
throws TException
{
- client.delete_table_column_statistics(constructSchemaName(catalogName, databaseName), tableName, columnName);
+ // TODO: This is not backwards compatible
+ client.delete_table_column_statistics(constructSchemaName(catalogName, databaseName), tableName, columnName, "hive");
}
@Override
@@ -279,7 +285,8 @@ public void setPartitionColumnStatistics(String databaseName, String tableName,
public void deletePartitionColumnStatistics(String databaseName, String tableName, String partitionName, String columnName)
throws TException
{
- client.delete_partition_column_statistics(constructSchemaName(catalogName, databaseName), tableName, partitionName, columnName);
+ // TODO: This is not backwards compatible
+ client.delete_partition_column_statistics(constructSchemaName(catalogName, databaseName), tableName, partitionName, columnName, "hive");
}
@Override
diff --git a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/Transport.java b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/Transport.java
index ea75e9479828c..10c177b9a9d01 100644
--- a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/Transport.java
+++ b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/thrift/Transport.java
@@ -219,25 +219,34 @@ public void flush()
}
}
- // Methods added in libthrift 0.14.0 and not present in Hive Metastore <= 3.1.2
@Override
public TConfiguration getConfiguration()
{
- return TConfiguration.DEFAULT;
+ return transport.getConfiguration();
}
@Override
public void updateKnownMessageSize(long size)
throws TTransportException
{
- // noop: method added in libthrift 0.14.0 and not present in Hive Metastore <= 3.1.2
+ try {
+ transport.updateKnownMessageSize(size);
+ }
+ catch (TTransportException e) {
+ throw rewriteException(e, address);
+ }
}
@Override
public void checkReadBytesAvailable(long numBytes)
throws TTransportException
{
- // noop: method added in libthrift 0.14.0 and not present in Hive Metastore <= 3.1.2
+ try {
+ transport.checkReadBytesAvailable(numBytes);
+ }
+ catch (TTransportException e) {
+ throw rewriteException(e, address);
+ }
}
}
}
diff --git a/presto-hive/pom.xml b/presto-hive/pom.xml
index 6147588b23007..550fdd0ff10b1 100644
--- a/presto-hive/pom.xml
+++ b/presto-hive/pom.xml
@@ -576,6 +576,7 @@
**/TestParquetDistributedQueries.java
**/TestHive2InsertOverwrite.java
**/TestHive3InsertOverwrite.java
+ **/TestHive4InsertOverwrite.java
**/TestHiveSslWithKeyStore.java
**/TestHiveSslWithTrustStore.java
**/TestHiveSslWithTrustStoreKeyStore.java
@@ -725,6 +726,7 @@
**/TestHive2InsertOverwrite.java
**/TestHive3InsertOverwrite.java
+ **/TestHive4InsertOverwrite.java
diff --git a/presto-hive/src/main/java/com/facebook/presto/hive/GenericHiveRecordCursor.java b/presto-hive/src/main/java/com/facebook/presto/hive/GenericHiveRecordCursor.java
index 9f573500a87d3..2b192fc3e5144 100644
--- a/presto-hive/src/main/java/com/facebook/presto/hive/GenericHiveRecordCursor.java
+++ b/presto-hive/src/main/java/com/facebook/presto/hive/GenericHiveRecordCursor.java
@@ -19,15 +19,18 @@
import com.facebook.presto.common.type.Type;
import com.facebook.presto.common.type.TypeManager;
import com.facebook.presto.hadoop.TextLineLengthLimitExceededException;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.RecordCursor;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
@@ -46,8 +49,6 @@
import java.io.IOException;
import java.io.UncheckedIOException;
import java.math.BigInteger;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.List;
@@ -117,8 +118,10 @@ public class GenericHiveRecordCursor
private long completedBytes;
private Object rowData;
private boolean closed;
+ private final boolean legacyTimestampEnabled;
public GenericHiveRecordCursor(
+ ConnectorSession connectorSession,
Configuration configuration,
Path path,
RecordReader recordReader,
@@ -128,7 +131,7 @@ public GenericHiveRecordCursor(
ZoneId hiveStorageTimeZoneId,
TypeManager typeManager)
{
- this(configuration, path, recordReader, totalBytes, splitSchema, columns, getDateTimeZone(hiveStorageTimeZoneId), typeManager);
+ this(connectorSession, configuration, path, recordReader, totalBytes, splitSchema, columns, getDateTimeZone(hiveStorageTimeZoneId), typeManager);
}
private static DateTimeZone getDateTimeZone(ZoneId hiveStorageTimeZoneId)
@@ -138,6 +141,7 @@ private static DateTimeZone getDateTimeZone(ZoneId hiveStorageTimeZoneId)
}
public GenericHiveRecordCursor(
+ ConnectorSession connectorSession,
Configuration configuration,
Path path,
RecordReader recordReader,
@@ -160,6 +164,7 @@ public GenericHiveRecordCursor(
this.key = recordReader.createKey();
this.value = recordReader.createValue();
this.hiveStorageTimeZone = hiveStorageTimeZone;
+ this.legacyTimestampEnabled = connectorSession.getSqlFunctionProperties().isLegacyTimestamp();
this.deserializer = getDeserializer(configuration, splitSchema);
this.rowInspector = getTableObjectInspector(deserializer);
@@ -304,32 +309,25 @@ private void parseLongColumn(int column)
else {
Object fieldValue = ((PrimitiveObjectInspector) fieldInspectors[column]).getPrimitiveJavaObject(fieldData);
checkState(fieldValue != null, "fieldValue should not be null");
- longs[column] = getLongExpressedValue(fieldValue, hiveStorageTimeZone);
+ longs[column] = getLongExpressedValue(fieldValue, hiveStorageTimeZone, legacyTimestampEnabled);
nulls[column] = false;
}
}
- private static long getLongExpressedValue(Object value, DateTimeZone hiveTimeZone)
+ private static long getLongExpressedValue(Object value, DateTimeZone hiveTimeZone, boolean legacyTimestampEnabled)
{
if (value instanceof Date) {
- long storageTime = ((Date) value).getTime();
- // convert date from VM current time zone to UTC
- long utcMillis = storageTime + JVM_TIME_ZONE.getOffset(storageTime);
+ long utcMillis = ((Date) value).toEpochMilli();
return TimeUnit.MILLISECONDS.toDays(utcMillis);
}
if (value instanceof Timestamp) {
- // The Hive SerDe parses timestamps using the default time zone of
- // this JVM, but the data might have been written using a different
- // time zone. We need to convert it to the configured time zone.
+ long hiveMillis = ((Timestamp) value).toEpochMilli();
- // the timestamp that Hive parsed using the JVM time zone
- long parsedJvmMillis = ((Timestamp) value).getTime();
-
- // remove the JVM time zone correction from the timestamp
- long hiveMillis = JVM_TIME_ZONE.convertUTCToLocal(parsedJvmMillis);
-
- // convert to UTC using the real time zone for the underlying data
- return hiveTimeZone.convertLocalToUTC(hiveMillis, false);
+ if (legacyTimestampEnabled) {
+ // convert to UTC using the real time zone for the underlying data
+ return hiveTimeZone.convertLocalToUTC(hiveMillis, false);
+ }
+ return hiveMillis;
}
if (value instanceof Float) {
return floatToRawIntBits(((Float) value));
@@ -513,7 +511,7 @@ private void parseObjectColumn(int column)
nulls[column] = true;
}
else {
- objects[column] = getBlockObject(types[column], fieldData, fieldInspectors[column], hiveStorageTimeZone);
+ objects[column] = getBlockObject(types[column], fieldData, fieldInspectors[column], hiveStorageTimeZone, legacyTimestampEnabled);
nulls[column] = false;
}
}
diff --git a/presto-hive/src/main/java/com/facebook/presto/hive/GenericHiveRecordCursorProvider.java b/presto-hive/src/main/java/com/facebook/presto/hive/GenericHiveRecordCursorProvider.java
index 48613955a6ae4..2936d2b4a6af0 100644
--- a/presto-hive/src/main/java/com/facebook/presto/hive/GenericHiveRecordCursorProvider.java
+++ b/presto-hive/src/main/java/com/facebook/presto/hive/GenericHiveRecordCursorProvider.java
@@ -80,6 +80,7 @@ public Optional createRecordCursor(
() -> HiveUtil.createRecordReader(actualConfiguration, path, fileSplit.getStart(), fileSplit.getLength(), schema, columns, fileSplit.getCustomSplitInfo()));
return hdfsEnvironment.doAs(session.getUser(),
() -> Optional.of(new GenericHiveRecordCursor<>(
+ session,
actualConfiguration,
path,
genericRecordReader(recordReader),
diff --git a/presto-hive/src/main/java/com/facebook/presto/hive/HiveMaterializedViewUtils.java b/presto-hive/src/main/java/com/facebook/presto/hive/HiveMaterializedViewUtils.java
index 63b10a2b69034..e1fb6079de522 100644
--- a/presto-hive/src/main/java/com/facebook/presto/hive/HiveMaterializedViewUtils.java
+++ b/presto-hive/src/main/java/com/facebook/presto/hive/HiveMaterializedViewUtils.java
@@ -21,6 +21,7 @@
import com.facebook.presto.hive.metastore.MetastoreContext;
import com.facebook.presto.hive.metastore.SemiTransactionalHiveMetastore;
import com.facebook.presto.hive.metastore.Table;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.MaterializedViewDefinition;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
@@ -136,6 +137,7 @@ private static boolean isCommonPartitionFound(
}
public static MaterializedDataPredicates getMaterializedDataPredicates(
+ ConnectorSession session,
SemiTransactionalHiveMetastore metastore,
MetastoreContext metastoreContext,
TypeManager typeManager,
@@ -174,7 +176,7 @@ public static MaterializedDataPredicates getMaterializedDataPredicates(
throw new PrestoException(HIVE_INVALID_PARTITION_VALUE, String.format("partition key value cannot be null for field: %s", name));
}
- partitionNameAndValuesMap.put(name, parsePartitionValue(name, value, type, timeZone));
+ partitionNameAndValuesMap.put(name, parsePartitionValue(Optional.of(session), name, value, type, timeZone));
});
TupleDomain tupleDomain = TupleDomain.fromFixedValues(partitionNameAndValuesMap.build());
diff --git a/presto-hive/src/main/java/com/facebook/presto/hive/HiveMetadata.java b/presto-hive/src/main/java/com/facebook/presto/hive/HiveMetadata.java
index 2018ef82e7dd8..95183a8fc84bd 100644
--- a/presto-hive/src/main/java/com/facebook/presto/hive/HiveMetadata.java
+++ b/presto-hive/src/main/java/com/facebook/presto/hive/HiveMetadata.java
@@ -1675,7 +1675,7 @@ public void finishStatisticsCollection(ConnectorSession session, ConnectorTableH
List partitionValues = partitionValuesList.get(i);
ComputedStatistics collectedStatistics = computedStatisticsMap.containsKey(partitionValues)
? computedStatisticsMap.get(partitionValues)
- : computedStatisticsMap.get(canonicalizePartitionValues(partitionName, partitionValues, partitionTypes));
+ : computedStatisticsMap.get(canonicalizePartitionValues(session, partitionName, partitionValues, partitionTypes));
if (collectedStatistics == null) {
partitionStatistics.put(partitionValues, emptyPartitionStatistics.get());
}
@@ -1700,6 +1700,7 @@ private static Map getColumnStatistics(Map getColumnStatistics(
+ ConnectorSession session,
Map, ComputedStatistics> statistics,
String partitionName,
List partitionValues,
@@ -1708,17 +1709,17 @@ private Map getColumnStatistics(
Optional
+
+ joda-time
+ joda-time
+
+
com.facebook.airlift
bootstrap
diff --git a/presto-hudi/src/main/java/com/facebook/presto/hive/HudiRecordCursors.java b/presto-hudi/src/main/java/com/facebook/presto/hive/HudiRecordCursors.java
index 6a4218115a1e6..7d75f98718497 100644
--- a/presto-hudi/src/main/java/com/facebook/presto/hive/HudiRecordCursors.java
+++ b/presto-hudi/src/main/java/com/facebook/presto/hive/HudiRecordCursors.java
@@ -16,6 +16,7 @@
import com.facebook.presto.common.type.TypeManager;
import com.facebook.presto.hudi.HudiColumnHandle;
+import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.RecordCursor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -34,6 +35,7 @@ public final class HudiRecordCursors
private HudiRecordCursors() {}
public static RecordCursor createRecordCursor(
+ ConnectorSession connectorSession,
Configuration configuration,
Path path,
RecordReader, ? extends Writable> recordReader,
@@ -43,15 +45,14 @@ public static RecordCursor createRecordCursor(
ZoneId hiveStorageTimeZone,
TypeManager typeManager)
{
- return new GenericHiveRecordCursor<>(
+ return new GenericHiveRecordCursor<>(connectorSession,
configuration,
path,
recordReader,
totalBytes,
hiveSchema,
toHiveColumnHandles(hiveColumnHandles),
- hiveStorageTimeZone,
- typeManager);
+ hiveStorageTimeZone, typeManager);
}
private static List toHiveColumnHandles(List columns)
diff --git a/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiParquetPageSources.java b/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiParquetPageSources.java
index 5e4a0290bf263..c76de319c3950 100644
--- a/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiParquetPageSources.java
+++ b/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiParquetPageSources.java
@@ -49,6 +49,7 @@
import org.apache.parquet.io.MessageColumnIO;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.Type;
+import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.ArrayList;
@@ -149,6 +150,9 @@ public static ConnectorPageSource createParquetPageSource(
}
MessageColumnIO messageColumnIO = getColumnIO(fileSchema, requestedSchema);
+
+ Optional timezone = Optional.ofNullable(fileMetaData.getKeyValueMetaData().get("writer.time.zone")).map(DateTimeZone::forID);
+
ParquetReader parquetReader = new ParquetReader(
messageColumnIO,
blocks,
@@ -161,7 +165,8 @@ public static ConnectorPageSource createParquetPageSource(
parquetPredicate,
blockIndexStores,
false,
- fileDecryptor);
+ fileDecryptor,
+ timezone);
ImmutableList.Builder namesBuilder = ImmutableList.builder();
ImmutableList.Builder prestoTypes = ImmutableList.builder();
diff --git a/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiPartitionManager.java b/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiPartitionManager.java
index ba80fd2f516c7..a46bc2c784a8e 100644
--- a/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiPartitionManager.java
+++ b/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiPartitionManager.java
@@ -93,6 +93,7 @@ public List getEffectivePartitions(
.map(PartitionNameWithVersion::getPartitionName)
// Apply extra filters which could not be done by getPartitionNamesByFilter, similar to filtering in HivePartitionManager#getPartitionsIterator
.filter(partitionName -> parseValuesAndFilterPartition(
+ connectorSession,
partitionName,
hudiColumnHandles,
partitionTypes,
@@ -101,6 +102,7 @@ public List getEffectivePartitions(
}
private boolean parseValuesAndFilterPartition(
+ ConnectorSession session,
String partitionName,
List partitionColumns,
List partitionColumnTypes,
@@ -111,7 +113,7 @@ private boolean parseValuesAndFilterPartition(
}
Map domains = constraintSummary.getDomains().orElseGet(ImmutableMap::of);
- Map partitionValues = parsePartition(partitionName, partitionColumns, partitionColumnTypes);
+ Map partitionValues = parsePartition(session, partitionName, partitionColumns, partitionColumnTypes);
for (HudiColumnHandle column : partitionColumns) {
NullableValue value = partitionValues.get(column);
Domain allowedDomain = domains.get(column);
@@ -124,6 +126,7 @@ private boolean parseValuesAndFilterPartition(
}
private static Map parsePartition(
+ ConnectorSession session,
String partitionName,
List partitionColumns,
List partitionColumnTypes)
@@ -135,7 +138,7 @@ private static Map parsePartition(
ImmutableMap.Builder builder = ImmutableMap.builder();
for (int i = 0; i < partitionColumns.size(); i++) {
HudiColumnHandle column = partitionColumns.get(i);
- NullableValue parsedValue = parsePartitionValue(partitionName, partitionValues.get(i), partitionColumnTypes.get(i), ZoneId.of(TimeZone.getDefault().getID()));
+ NullableValue parsedValue = parsePartitionValue(session, partitionName, partitionValues.get(i), partitionColumnTypes.get(i), ZoneId.of(TimeZone.getDefault().getID()));
builder.put(column, parsedValue);
}
return builder.build();
diff --git a/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiRecordCursors.java b/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiRecordCursors.java
index fbab341ade696..3a2f547158a70 100644
--- a/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiRecordCursors.java
+++ b/presto-hudi/src/main/java/com/facebook/presto/hudi/HudiRecordCursors.java
@@ -91,7 +91,7 @@ public static RecordCursor createRealtimeRecordCursor(
return hdfsEnvironment.doAs(session.getUser(), () -> {
RecordReader, ?> recordReader = createRecordReader(configuration, schema, split, dataColumns);
@SuppressWarnings("unchecked") RecordReader, ? extends Writable> reader = (RecordReader, ? extends Writable>) recordReader;
- return createRecordCursor(configuration, path, reader, baseFile.getLength(), schema, dataColumns, hiveStorageTimeZone, typeManager);
+ return createRecordCursor(session, configuration, path, reader, baseFile.getLength(), schema, dataColumns, hiveStorageTimeZone, typeManager);
});
}
diff --git a/presto-iceberg/pom.xml b/presto-iceberg/pom.xml
index 1b0d79001419e..fb162dfae6d30 100644
--- a/presto-iceberg/pom.xml
+++ b/presto-iceberg/pom.xml
@@ -784,6 +784,12 @@
commons-math3
test
+
+
+ com.h2database
+ h2
+ test
+
diff --git a/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergFileWriterFactory.java b/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergFileWriterFactory.java
index 62c02c881a950..ac27ac0dd5c98 100644
--- a/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergFileWriterFactory.java
+++ b/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergFileWriterFactory.java
@@ -20,6 +20,7 @@
import com.facebook.presto.hive.FileFormatDataSourceStats;
import com.facebook.presto.hive.HdfsContext;
import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
import com.facebook.presto.hive.HiveDwrfEncryptionProvider;
import com.facebook.presto.hive.NodeVersion;
import com.facebook.presto.hive.OrcFileWriterConfig;
@@ -40,6 +41,7 @@
import org.apache.iceberg.MetricsConfig;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Types;
+import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.List;
@@ -86,6 +88,7 @@ public class IcebergFileWriterFactory
private final NoOpOrcWriterStats orcWriterStats = NOOP_WRITER_STATS;
private final OrcFileWriterConfig orcFileWriterConfig;
private final DwrfEncryptionProvider dwrfEncryptionProvider;
+ private final DateTimeZone writerTimezone;
@Inject
public IcebergFileWriterFactory(
@@ -94,7 +97,8 @@ public IcebergFileWriterFactory(
FileFormatDataSourceStats readStats,
NodeVersion nodeVersion,
OrcFileWriterConfig orcFileWriterConfig,
- HiveDwrfEncryptionProvider dwrfEncryptionProvider)
+ HiveDwrfEncryptionProvider dwrfEncryptionProvider,
+ HiveClientConfig hiveClientConfig)
{
this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
this.typeManager = requireNonNull(typeManager, "typeManager is null");
@@ -102,6 +106,7 @@ public IcebergFileWriterFactory(
this.nodeVersion = requireNonNull(nodeVersion, "nodeVersion is null");
this.orcFileWriterConfig = requireNonNull(orcFileWriterConfig, "orcFileWriterConfig is null");
this.dwrfEncryptionProvider = requireNonNull(dwrfEncryptionProvider, "DwrfEncryptionProvider is null").toDwrfEncryptionProvider();
+ this.writerTimezone = requireNonNull(hiveClientConfig, "hiveClientConfig is null").getDateTimeZone();
}
public IcebergFileWriter createFileWriter(
@@ -164,7 +169,9 @@ private IcebergFileWriter createParquetWriter(
outputPath,
hdfsEnvironment,
hdfsContext,
- metricsConfig);
+ metricsConfig,
+ writerTimezone,
+ nodeVersion.toString());
}
catch (IOException e) {
throw new PrestoException(ICEBERG_WRITER_OPEN_ERROR, "Error creating Parquet file", e);
diff --git a/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergPageSourceProvider.java b/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergPageSourceProvider.java
index 62d0d01c7b7b4..ddd9f5e37c713 100644
--- a/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergPageSourceProvider.java
+++ b/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergPageSourceProvider.java
@@ -108,6 +108,7 @@
import org.apache.parquet.io.ColumnIO;
import org.apache.parquet.io.MessageColumnIO;
import org.apache.parquet.schema.MessageType;
+import org.joda.time.DateTimeZone;
import org.roaringbitmap.longlong.LongBitmapDataProvider;
import org.roaringbitmap.longlong.Roaring64Bitmap;
@@ -246,7 +247,7 @@ public IcebergPageSourceProvider(
this.sortParameters = requireNonNull(sortParameters, "sortParameters is null");
}
- private static ConnectorPageSourceWithRowPositions createParquetPageSource(
+ private ConnectorPageSourceWithRowPositions createParquetPageSource(
HdfsEnvironment hdfsEnvironment,
ConnectorSession session,
Configuration configuration,
@@ -337,6 +338,9 @@ private static ConnectorPageSourceWithRowPositions createParquetPageSource(
}
MessageColumnIO messageColumnIO = getColumnIO(fileSchema, requestedSchema);
+
+ Optional timezone = Optional.ofNullable(fileMetaData.getKeyValueMetaData().get("writer.time.zone")).map(DateTimeZone::forID);
+
ParquetReader parquetReader = new ParquetReader(
messageColumnIO,
blocks,
@@ -349,7 +353,8 @@ private static ConnectorPageSourceWithRowPositions createParquetPageSource(
parquetPredicate,
blockIndexStores,
false,
- fileDecryptor);
+ fileDecryptor,
+ timezone);
ImmutableList.Builder namesBuilder = ImmutableList.builder();
ImmutableList.Builder prestoTypes = ImmutableList.builder();
diff --git a/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergParquetFileWriter.java b/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergParquetFileWriter.java
index 64660cfd59a53..533d5421d9103 100644
--- a/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergParquetFileWriter.java
+++ b/presto-iceberg/src/main/java/com/facebook/presto/iceberg/IcebergParquetFileWriter.java
@@ -24,6 +24,7 @@
import org.apache.iceberg.parquet.ParquetUtil;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.schema.MessageType;
+import org.joda.time.DateTimeZone;
import java.io.OutputStream;
import java.util.List;
@@ -54,7 +55,9 @@ public IcebergParquetFileWriter(
Path outputPath,
HdfsEnvironment hdfsEnvironment,
HdfsContext hdfsContext,
- MetricsConfig metricsConfig)
+ MetricsConfig metricsConfig,
+ DateTimeZone writerTimezone,
+ String prestoVersion)
{
super(outputStream,
rollbackAction,
@@ -64,7 +67,9 @@ public IcebergParquetFileWriter(
primitiveTypes,
parquetWriterOptions,
fileInputColumnIndexes,
- compressionCodecName);
+ compressionCodecName,
+ writerTimezone,
+ prestoVersion);
this.outputPath = requireNonNull(outputPath, "outputPath is null");
this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
this.hdfsContext = requireNonNull(hdfsContext, "hdfsContext is null");
diff --git a/presto-iceberg/src/test/java/com/facebook/presto/iceberg/IcebergDistributedSmokeTestBase.java b/presto-iceberg/src/test/java/com/facebook/presto/iceberg/IcebergDistributedSmokeTestBase.java
index 51845c5d22668..c5b0cb974460c 100644
--- a/presto-iceberg/src/test/java/com/facebook/presto/iceberg/IcebergDistributedSmokeTestBase.java
+++ b/presto-iceberg/src/test/java/com/facebook/presto/iceberg/IcebergDistributedSmokeTestBase.java
@@ -1766,12 +1766,10 @@ private void testBucketTransformsOnTimeForFormat(Session session, FileFormat for
"(time '07:31:55.425', 7)";
assertUpdate(session, insertSql, 7);
- assertQuery(session, "SELECT COUNT(*) FROM \"test_bucket_transform_on_time$partitions\"", "SELECT 4");
+ assertQuery(session, "SELECT COUNT(*) FROM \"test_bucket_transform_on_time$partitions\"", "SELECT 2");
- assertQuery(session, select + " WHERE a_bucket = 0", "VALUES(0, 2, time '00:00:00.000', time '12:13:14.345', 3, 6)");
- assertQuery(session, select + " WHERE a_bucket = 1", "VALUES(1, 1, time '23:23:59.999', time '23:23:59.999', 5, 5)");
- assertQuery(session, select + " WHERE a_bucket = 2", "VALUES(2, 1, time '21:22:50.002', time '21:22:50.002', 2, 2)");
- assertQuery(session, select + " WHERE a_bucket = 3", "VALUES(3, 3, time '00:00:01.001', time '07:31:55.425', 1, 7)");
+ assertQuery(session, select + " WHERE a_bucket = 0", "VALUES(0, 3, time '00:00:00.000', time '12:13:14.345', 3, 6)");
+ assertQuery(session, select + " WHERE a_bucket = 2", "VALUES(2, 4, time '01:02:03.123', time '23:23:59.999', 1, 7)");
assertQuery(session, "select * from test_bucket_transform_on_time where a = time '01:02:03.123'",
"VALUES(time '01:02:03.123', 1)");
diff --git a/presto-iceberg/src/test/java/com/facebook/presto/iceberg/TestIcebergFileWriter.java b/presto-iceberg/src/test/java/com/facebook/presto/iceberg/TestIcebergFileWriter.java
index 31f0729907da2..7ab16e8a51694 100644
--- a/presto-iceberg/src/test/java/com/facebook/presto/iceberg/TestIcebergFileWriter.java
+++ b/presto-iceberg/src/test/java/com/facebook/presto/iceberg/TestIcebergFileWriter.java
@@ -63,6 +63,7 @@
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.common.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.common.type.VarcharType.VARCHAR;
+import static com.facebook.presto.hive.parquet.ParquetTester.HIVE_STORAGE_TIME_ZONE;
import static com.facebook.presto.iceberg.IcebergAbstractMetadata.toIcebergSchema;
import static com.facebook.presto.iceberg.IcebergDistributedTestBase.getHdfsEnvironment;
import static com.facebook.presto.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
@@ -120,7 +121,8 @@ public void setup()
this.hdfsContext = new HdfsContext(connectorSession);
HdfsEnvironment hdfsEnvironment = getHdfsEnvironment(new HiveClientConfig(), new MetastoreClientConfig(), new HiveS3Config());
this.icebergFileWriterFactory = new IcebergFileWriterFactory(hdfsEnvironment, typeManager,
- new FileFormatDataSourceStats(), new NodeVersion("test"), new OrcFileWriterConfig(), HiveDwrfEncryptionProvider.NO_ENCRYPTION);
+ new FileFormatDataSourceStats(), new NodeVersion("test"), new OrcFileWriterConfig(), HiveDwrfEncryptionProvider.NO_ENCRYPTION,
+ new HiveClientConfig().setTimeZone(HIVE_STORAGE_TIME_ZONE.getID()));
}
@Test
diff --git a/presto-iceberg/src/test/java/com/facebook/presto/iceberg/rest/IcebergRestTestUtil.java b/presto-iceberg/src/test/java/com/facebook/presto/iceberg/rest/IcebergRestTestUtil.java
index 34a88837ce696..1268f9805c7c6 100644
--- a/presto-iceberg/src/test/java/com/facebook/presto/iceberg/rest/IcebergRestTestUtil.java
+++ b/presto-iceberg/src/test/java/com/facebook/presto/iceberg/rest/IcebergRestTestUtil.java
@@ -64,12 +64,14 @@ public static Map restConnectorProperties(String serverUri)
return ImmutableMap.of("iceberg.rest.uri", serverUri);
}
- public static TestingHttpServer getRestServer(String location)
+ public static TestingHttpServer getRestServer(String location) throws ClassNotFoundException
{
JdbcCatalog backingCatalog = new JdbcCatalog();
HdfsEnvironment hdfsEnvironment = getHdfsEnvironment(new HiveClientConfig(), new MetastoreClientConfig(), new HiveS3Config());
backingCatalog.setConf(hdfsEnvironment.getConfiguration(new HdfsContext(SESSION), new Path(location)));
+ Class.forName("org.h2.Driver");
+
Map properties = ImmutableMap.builder()
.put(URI, "jdbc:h2:mem:test_" + System.nanoTime() + "_" + ThreadLocalRandom.current().nextInt())
.put(WAREHOUSE_LOCATION, location)
diff --git a/presto-main-base/src/main/java/com/facebook/presto/operator/scalar/JsonOperators.java b/presto-main-base/src/main/java/com/facebook/presto/operator/scalar/JsonOperators.java
index 576aefaafdb1d..af08f12247df1 100644
--- a/presto-main-base/src/main/java/com/facebook/presto/operator/scalar/JsonOperators.java
+++ b/presto-main-base/src/main/java/com/facebook/presto/operator/scalar/JsonOperators.java
@@ -338,7 +338,12 @@ public static Slice castFromTimestamp(SqlFunctionProperties properties, @SqlType
try {
SliceOutput output = new DynamicSliceOutput(25);
try (JsonGenerator jsonGenerator = createJsonGenerator(JSON_FACTORY, output)) {
- jsonGenerator.writeString(printTimestampWithoutTimeZone(properties.getTimeZoneKey(), value));
+ if (properties.isLegacyTimestamp()) {
+ jsonGenerator.writeString(printTimestampWithoutTimeZone(properties.getTimeZoneKey(), value));
+ }
+ else {
+ jsonGenerator.writeString(printTimestampWithoutTimeZone(value));
+ }
}
return output.slice();
}
diff --git a/presto-main-base/src/main/java/com/facebook/presto/sql/analyzer/FunctionsConfig.java b/presto-main-base/src/main/java/com/facebook/presto/sql/analyzer/FunctionsConfig.java
index fbb20beccd208..8d185c61c1701 100644
--- a/presto-main-base/src/main/java/com/facebook/presto/sql/analyzer/FunctionsConfig.java
+++ b/presto-main-base/src/main/java/com/facebook/presto/sql/analyzer/FunctionsConfig.java
@@ -41,7 +41,7 @@ public class FunctionsConfig
private ArrayAggGroupImplementation arrayAggGroupImplementation = ArrayAggGroupImplementation.NEW;
private MultimapAggGroupImplementation multimapAggGroupImplementation = MultimapAggGroupImplementation.NEW;
private boolean legacyRowFieldOrdinalAccess;
- private boolean legacyTimestamp = true;
+ private boolean legacyTimestamp;
private boolean parseDecimalLiteralsAsDouble;
private boolean fieldNamesInJsonCastEnabled;
private boolean warnOnPossibleNans;
diff --git a/presto-main-base/src/main/java/com/facebook/presto/util/JsonUtil.java b/presto-main-base/src/main/java/com/facebook/presto/util/JsonUtil.java
index e8e7cb4802de2..2f2763a3c0d5f 100644
--- a/presto-main-base/src/main/java/com/facebook/presto/util/JsonUtil.java
+++ b/presto-main-base/src/main/java/com/facebook/presto/util/JsonUtil.java
@@ -71,6 +71,7 @@
import static com.facebook.presto.common.type.JsonType.JSON;
import static com.facebook.presto.common.type.RealType.REAL;
import static com.facebook.presto.common.type.SmallintType.SMALLINT;
+import static com.facebook.presto.common.type.TimeZoneKey.UTC_KEY;
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.common.type.TinyintType.TINYINT;
import static com.facebook.presto.common.type.TypeUtils.isDistinctType;
@@ -544,7 +545,7 @@ public void writeJsonValue(JsonGenerator jsonGenerator, Block block, int positio
}
else {
long value = TIMESTAMP.getLong(block, position);
- jsonGenerator.writeString(printTimestampWithoutTimeZone(properties.getTimeZoneKey(), value));
+ jsonGenerator.writeString(printTimestampWithoutTimeZone(properties.isLegacyTimestamp() ? properties.getTimeZoneKey() : UTC_KEY, value));
}
}
}
diff --git a/presto-main-base/src/test/java/com/facebook/presto/sql/TestRowExpressionSerde.java b/presto-main-base/src/test/java/com/facebook/presto/sql/TestRowExpressionSerde.java
index 70216b1a2c294..37f98e808e656 100644
--- a/presto-main-base/src/test/java/com/facebook/presto/sql/TestRowExpressionSerde.java
+++ b/presto-main-base/src/test/java/com/facebook/presto/sql/TestRowExpressionSerde.java
@@ -131,7 +131,7 @@ public void testSimpleLiteral()
assertLiteral("CAST(NULL AS VARCHAR)", constant(null, VARCHAR));
assertLiteral("DATE '1991-01-01'", constant(7670L, DATE));
- assertLiteral("TIMESTAMP '1991-01-01 00:00:00.000'", constant(662727600000L, TIMESTAMP));
+ assertLiteral("TIMESTAMP '1991-01-01 00:00:00.000'", constant(662688000000L, TIMESTAMP));
}
@Test
diff --git a/presto-main-base/src/test/java/com/facebook/presto/sql/analyzer/TestFunctionsConfig.java b/presto-main-base/src/test/java/com/facebook/presto/sql/analyzer/TestFunctionsConfig.java
index ad7fc277d2658..98ca361371f0e 100644
--- a/presto-main-base/src/test/java/com/facebook/presto/sql/analyzer/TestFunctionsConfig.java
+++ b/presto-main-base/src/test/java/com/facebook/presto/sql/analyzer/TestFunctionsConfig.java
@@ -48,7 +48,7 @@ public void testDefaults()
.setArrayAggGroupImplementation(ArrayAggGroupImplementation.NEW)
.setMultimapAggGroupImplementation(MultimapAggGroupImplementation.NEW)
.setLegacyRowFieldOrdinalAccess(false)
- .setLegacyTimestamp(true)
+ .setLegacyTimestamp(false)
.setParseDecimalLiteralsAsDouble(false)
.setFieldNamesInJsonCastEnabled(false)
.setWarnOnCommonNanPatterns(false)
@@ -77,7 +77,7 @@ public void testExplicitPropertyMappings()
.put("arrayagg.implementation", "LEGACY")
.put("multimapagg.implementation", "LEGACY")
.put("deprecated.legacy-row-field-ordinal-access", "true")
- .put("deprecated.legacy-timestamp", "false")
+ .put("deprecated.legacy-timestamp", "true")
.put("parse-decimal-literals-as-double", "true")
.put("field-names-in-json-cast-enabled", "true")
.put("warn-on-common-nan-patterns", "true")
@@ -103,7 +103,7 @@ public void testExplicitPropertyMappings()
.setArrayAggGroupImplementation(ArrayAggGroupImplementation.LEGACY)
.setMultimapAggGroupImplementation(MultimapAggGroupImplementation.LEGACY)
.setLegacyRowFieldOrdinalAccess(true)
- .setLegacyTimestamp(false)
+ .setLegacyTimestamp(true)
.setParseDecimalLiteralsAsDouble(true)
.setFieldNamesInJsonCastEnabled(true)
.setWarnOnCommonNanPatterns(true)
diff --git a/presto-main-base/src/test/java/com/facebook/presto/sql/expressions/AbstractTestExpressionInterpreter.java b/presto-main-base/src/test/java/com/facebook/presto/sql/expressions/AbstractTestExpressionInterpreter.java
index f87f946601061..5b1fbd1a1b0a9 100644
--- a/presto-main-base/src/test/java/com/facebook/presto/sql/expressions/AbstractTestExpressionInterpreter.java
+++ b/presto-main-base/src/test/java/com/facebook/presto/sql/expressions/AbstractTestExpressionInterpreter.java
@@ -59,7 +59,6 @@
import io.airlift.slice.Slices;
import org.intellij.lang.annotations.Language;
import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import org.joda.time.LocalDate;
import org.joda.time.LocalTime;
import org.testng.annotations.Test;
@@ -89,10 +88,10 @@
import static com.facebook.presto.sql.ExpressionFormatter.formatExpression;
import static com.facebook.presto.type.IntervalDayTimeType.INTERVAL_DAY_TIME;
import static com.facebook.presto.util.AnalyzerUtil.createParsingOptions;
-import static com.facebook.presto.util.DateTimeZoneIndex.getDateTimeZone;
import static io.airlift.slice.Slices.utf8Slice;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
+import static org.joda.time.DateTimeZone.UTC;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertThrows;
import static org.testng.Assert.assertTrue;
@@ -435,7 +434,7 @@ public void testBetween()
@Test
public void testExtract()
{
- DateTime dateTime = new DateTime(2001, 8, 22, 3, 4, 5, 321, getDateTimeZone(TEST_SESSION.getTimeZoneKey()));
+ DateTime dateTime = new DateTime(2001, 8, 22, 3, 4, 5, 321, UTC);
double seconds = dateTime.getMillis() / 1000.0;
assertOptimizedEquals("extract (YEAR from from_unixtime(" + seconds + "))", "2001");
@@ -455,10 +454,10 @@ public void testExtract()
assertOptimizedEquals("extract (QUARTER from bound_timestamp)", "3");
assertOptimizedEquals("extract (MONTH from bound_timestamp)", "8");
assertOptimizedEquals("extract (WEEK from bound_timestamp)", "34");
- assertOptimizedEquals("extract (DOW from bound_timestamp)", "2");
- assertOptimizedEquals("extract (DOY from bound_timestamp)", "233");
- assertOptimizedEquals("extract (DAY from bound_timestamp)", "21");
- assertOptimizedEquals("extract (HOUR from bound_timestamp)", "16");
+ assertOptimizedEquals("extract (DOW from bound_timestamp)", "3");
+ assertOptimizedEquals("extract (DOY from bound_timestamp)", "234");
+ assertOptimizedEquals("extract (DAY from bound_timestamp)", "22");
+ assertOptimizedEquals("extract (HOUR from bound_timestamp)", "3");
assertOptimizedEquals("extract (MINUTE from bound_timestamp)", "4");
assertOptimizedEquals("extract (SECOND from bound_timestamp)", "5");
// todo reenable when cast as timestamp with time zone is implemented
@@ -561,9 +560,9 @@ public void testInComplexTypes()
public void testCurrentTimestamp()
{
double current = TEST_SESSION.getStartTime() / 1000.0;
- assertOptimizedEquals("current_timestamp = from_unixtime(" + current + ")", "true");
+ assertOptimizedEquals("current_timestamp = from_unixtime(" + current + ", '" + TEST_SESSION.getTimeZoneKey().getId() + "')", "true");
double future = current + TimeUnit.MINUTES.toSeconds(1);
- assertOptimizedEquals("current_timestamp > from_unixtime(" + future + ")", "false");
+ assertOptimizedEquals("current_timestamp > from_unixtime(" + future + ", '" + TEST_SESSION.getTimeZoneKey().getId() + "')", "false");
}
@Test
@@ -1723,15 +1722,15 @@ public static Object symbolConstant(Symbol symbol)
case "bound_double":
return 12.34;
case "bound_date":
- return new LocalDate(2001, 8, 22).toDateMidnight(DateTimeZone.UTC).getMillis();
+ return new LocalDate(2001, 8, 22).toDateMidnight(UTC).getMillis();
case "bound_time":
- return new LocalTime(3, 4, 5, 321).toDateTime(new DateTime(0, DateTimeZone.UTC)).getMillis();
+ return new LocalTime(3, 4, 5, 321).toDateTime(new DateTime(0, UTC)).getMillis();
case "bound_timestamp":
- return new DateTime(2001, 8, 22, 3, 4, 5, 321, DateTimeZone.UTC).getMillis();
+ return new DateTime(2001, 8, 22, 3, 4, 5, 321, UTC).getMillis();
case "bound_pattern":
return utf8Slice("%el%");
case "bound_timestamp_with_timezone":
- return new SqlTimestampWithTimeZone(new DateTime(1970, 1, 1, 1, 0, 0, 999, DateTimeZone.UTC).getMillis(), getTimeZoneKey("Z"));
+ return new SqlTimestampWithTimeZone(new DateTime(1970, 1, 1, 1, 0, 0, 999, UTC).getMillis(), getTimeZoneKey("Z"));
case "bound_varbinary":
return Slices.wrappedBuffer((byte) 0xab);
case "bound_decimal_short":
diff --git a/presto-main-base/src/test/java/com/facebook/presto/sql/gen/TestExpressionCompiler.java b/presto-main-base/src/test/java/com/facebook/presto/sql/gen/TestExpressionCompiler.java
index c5383b669af60..1e72e678a54e2 100644
--- a/presto-main-base/src/test/java/com/facebook/presto/sql/gen/TestExpressionCompiler.java
+++ b/presto-main-base/src/test/java/com/facebook/presto/sql/gen/TestExpressionCompiler.java
@@ -77,6 +77,7 @@
import static com.facebook.presto.common.type.IntegerType.INTEGER;
import static com.facebook.presto.common.type.JsonType.JSON;
import static com.facebook.presto.common.type.SmallintType.SMALLINT;
+import static com.facebook.presto.common.type.TimeZoneKey.UTC_KEY;
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.common.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static com.facebook.presto.common.type.UnknownType.UNKNOWN;
@@ -85,6 +86,8 @@
import static com.facebook.presto.common.type.VarcharType.createUnboundedVarcharType;
import static com.facebook.presto.common.type.VarcharType.createVarcharType;
import static com.facebook.presto.operator.scalar.JoniRegexpCasts.joniRegexp;
+import static com.facebook.presto.sql.tree.Extract.Field.TIMEZONE_HOUR;
+import static com.facebook.presto.sql.tree.Extract.Field.TIMEZONE_MINUTE;
import static com.facebook.presto.testing.DateTimeTestingUtils.sqlTimestampOf;
import static com.facebook.presto.util.DateTimeZoneIndex.getDateTimeZone;
import static com.facebook.presto.util.StructuralTestUtil.mapType;
@@ -1493,7 +1496,9 @@ public void testExtract()
millis = left.getMillis();
expected = callExtractFunction(TEST_SESSION.toConnectorSession(), millis, field);
}
- DateTimeZone zone = getDateTimeZone(TEST_SESSION.getTimeZoneKey());
+ DateTimeZone zone = TEST_SESSION.getSqlFunctionProperties().isLegacyTimestamp() ?
+ getDateTimeZone(TEST_SESSION.getTimeZoneKey()) :
+ getDateTimeZone(UTC_KEY);
long zoneOffsetMinutes = millis != null ? MILLISECONDS.toMinutes(zone.getOffset(millis)) : 0;
String expressionPattern = format(
"extract(%s from from_unixtime(%%s / 1000.0E0, %s, %s))",
@@ -1538,9 +1543,9 @@ private static long callExtractFunction(ConnectorSession session, long value, Fi
case SECOND:
return DateTimeFunctions.secondFromTimestamp(session.getSqlFunctionProperties(), value);
case TIMEZONE_MINUTE:
- return DateTimeFunctions.timeZoneMinuteFromTimestampWithTimeZone(packDateTimeWithZone(value, session.getSqlFunctionProperties().getTimeZoneKey()));
+ return DateTimeFunctions.timeZoneMinuteFromTimestampWithTimeZone(packDateTimeWithZone(value, session.getSqlFunctionProperties().isLegacyTimestamp() ? session.getSqlFunctionProperties().getTimeZoneKey() : UTC_KEY));
case TIMEZONE_HOUR:
- return DateTimeFunctions.timeZoneHourFromTimestampWithTimeZone(packDateTimeWithZone(value, session.getSqlFunctionProperties().getTimeZoneKey()));
+ return DateTimeFunctions.timeZoneHourFromTimestampWithTimeZone(packDateTimeWithZone(value, session.getSqlFunctionProperties().isLegacyTimestamp() ? session.getSqlFunctionProperties().getTimeZoneKey() : UTC_KEY));
}
throw new AssertionError("Unhandled field: " + field);
}
diff --git a/presto-main-base/src/test/java/com/facebook/presto/sql/planner/TestRowExpressionFormatter.java b/presto-main-base/src/test/java/com/facebook/presto/sql/planner/TestRowExpressionFormatter.java
index f6e6fcb94ed27..8b6c026f424d7 100644
--- a/presto-main-base/src/test/java/com/facebook/presto/sql/planner/TestRowExpressionFormatter.java
+++ b/presto-main-base/src/test/java/com/facebook/presto/sql/planner/TestRowExpressionFormatter.java
@@ -156,7 +156,7 @@ public void testConstants()
assertEquals(format(constantExpression), "DECIMAL'1.281734081274028174012432412423134'");
// time
- constantExpression = constant(662727600000L, TIMESTAMP);
+ constantExpression = constant(662688000000L, TIMESTAMP); // 662688000000 corresponds to 1991-01-01 00:00:00.000
assertEquals(format(constantExpression), "TIMESTAMP'1991-01-01 00:00:00.000'");
constantExpression = constant(7670L, DATE);
assertEquals(format(constantExpression), "DATE'1991-01-01'");
diff --git a/presto-native-execution/presto_cpp/main/PrestoServer.cpp b/presto-native-execution/presto_cpp/main/PrestoServer.cpp
index c3210d84575ca..8bc505040c34e 100644
--- a/presto-native-execution/presto_cpp/main/PrestoServer.cpp
+++ b/presto-native-execution/presto_cpp/main/PrestoServer.cpp
@@ -239,7 +239,7 @@ json::array_t getOptimizedExpressions(
auto configs = toVeloxConfigsFromSessionProperties(sessionProperties);
configs.insert({velox::core::QueryConfig::kSessionTimezone, timezone});
configs.insert(
- {velox::core::QueryConfig::kAdjustTimestampToTimezone, "true"});
+ {velox::core::QueryConfig::kAdjustTimestampToTimezone, "false"});
configs.insert(
{velox::core::QueryConfig::kSessionStartTime, sessionStartTime});
diff --git a/presto-native-execution/presto_cpp/main/PrestoToVeloxQueryConfig.cpp b/presto-native-execution/presto_cpp/main/PrestoToVeloxQueryConfig.cpp
index 773f4b9bddc10..5d709fa3dd229 100644
--- a/presto-native-execution/presto_cpp/main/PrestoToVeloxQueryConfig.cpp
+++ b/presto-native-execution/presto_cpp/main/PrestoToVeloxQueryConfig.cpp
@@ -29,7 +29,7 @@ void updateVeloxConfigsWithSpecialCases(
// session_timezone.
auto it = configStrings.find("legacy_timestamp");
- // `legacy_timestamp` default value is true in the coordinator.
- if ((it == configStrings.end()) || (folly::to<bool>(it->second))) {
+ // `legacy_timestamp` default value is false in the coordinator.
+ if ((it != configStrings.end()) && folly::to<bool>(it->second)) {
configStrings.emplace(
velox::core::QueryConfig::kAdjustTimestampToTimezone, "true");
}
diff --git a/presto-native-execution/presto_cpp/main/properties/session/SessionProperties.cpp b/presto-native-execution/presto_cpp/main/properties/session/SessionProperties.cpp
index 06216598dd1d9..6ff22a3dccd03 100644
--- a/presto-native-execution/presto_cpp/main/properties/session/SessionProperties.cpp
+++ b/presto-native-execution/presto_cpp/main/properties/session/SessionProperties.cpp
@@ -384,7 +384,7 @@ SessionProperties::SessionProperties() {
QueryConfig::kAdjustTimestampToTimezone,
- // Overrides velox default value. legacy_timestamp default value is true
- // in the coordinator.
- "true");
+ // Aligns with velox default value. legacy_timestamp default value is false
+ // in the coordinator.
+ "false");
// TODO: remove this once cpu driver slicing config is turned on by default in
// Velox.
diff --git a/presto-native-execution/presto_cpp/main/tests/PrestoToVeloxQueryConfigTest.cpp b/presto-native-execution/presto_cpp/main/tests/PrestoToVeloxQueryConfigTest.cpp
index b049ac97744b7..50aa9cce502ac 100644
--- a/presto-native-execution/presto_cpp/main/tests/PrestoToVeloxQueryConfigTest.cpp
+++ b/presto-native-execution/presto_cpp/main/tests/PrestoToVeloxQueryConfigTest.cpp
@@ -554,7 +554,7 @@ TEST_F(PrestoToVeloxQueryConfigTest, specialHardCodedPrestoConfigurations) {
session.systemProperties.clear();
auto veloxConfig3 = QueryConfig(toVeloxConfigs(session));
- EXPECT_TRUE(veloxConfig3.adjustTimestampToTimezone());
+ EXPECT_FALSE(veloxConfig3.adjustTimestampToTimezone());
session.systemProperties.clear();
auto veloxConfig8 = QueryConfig(toVeloxConfigs(session));
@@ -765,11 +765,6 @@ TEST_F(PrestoToVeloxQueryConfigTest, systemConfigsWithoutSessionOverride) {
}
// Verify special case configs (always added)
- EXPECT_TRUE(
- veloxConfigs.count(core::QueryConfig::kAdjustTimestampToTimezone) > 0);
- EXPECT_EQ(
- "true", veloxConfigs.at(core::QueryConfig::kAdjustTimestampToTimezone));
-
EXPECT_TRUE(
veloxConfigs.count(core::QueryConfig::kDriverCpuTimeSliceLimitMs) > 0);
EXPECT_EQ(
@@ -787,8 +782,7 @@ TEST_F(PrestoToVeloxQueryConfigTest, systemConfigsWithoutSessionOverride) {
expectedExactConfigs++;
}
}
- expectedExactConfigs += 2; // kAdjustTimestampToTimezone,
- // kDriverCpuTimeSliceLimitMs
+ expectedExactConfigs += 1; // kDriverCpuTimeSliceLimitMs
expectedExactConfigs += 1; // kSessionStartTime
// Use exact matching to catch any config additions/removals
diff --git a/presto-native-sidecar-plugin/src/test/java/com/facebook/presto/sidecar/expressions/TestNativeExpressionOptimizer.java b/presto-native-sidecar-plugin/src/test/java/com/facebook/presto/sidecar/expressions/TestNativeExpressionOptimizer.java
index 08f7350fda450..6db3d605311d8 100644
--- a/presto-native-sidecar-plugin/src/test/java/com/facebook/presto/sidecar/expressions/TestNativeExpressionOptimizer.java
+++ b/presto-native-sidecar-plugin/src/test/java/com/facebook/presto/sidecar/expressions/TestNativeExpressionOptimizer.java
@@ -135,7 +135,7 @@ public void testCurrentTimestamp()
double epochSeconds = instant.toEpochMilli() / 1000.0;
assertOptimizedEquals(
- "now() = from_unixtime(" + epochSeconds + ")",
+ "now() = from_unixtime(" + epochSeconds + ", '" + session.getTimeZoneKey().getId() + "')",
"true",
session);
assertOptimizedEquals(
diff --git a/presto-native-tests/src/test/java/com/facebook/presto/nativetests/AbstractTestAggregationsNative.java b/presto-native-tests/src/test/java/com/facebook/presto/nativetests/AbstractTestAggregationsNative.java
index 9619c92a89e57..5f4c7d2d88ca9 100644
--- a/presto-native-tests/src/test/java/com/facebook/presto/nativetests/AbstractTestAggregationsNative.java
+++ b/presto-native-tests/src/test/java/com/facebook/presto/nativetests/AbstractTestAggregationsNative.java
@@ -60,8 +60,8 @@ public void testApproximateCountDistinct()
assertQuery(format("SELECT approx_distinct(%s, 0.023) FROM orders", orderdate), "SELECT 2372");
// test timestamp
- assertQuery("SELECT approx_distinct(CAST(orderdate AS TIMESTAMP)) FROM orders", "SELECT 2347");
- assertQuery("SELECT approx_distinct(CAST(orderdate AS TIMESTAMP), 0.023) FROM orders", "SELECT 2347");
+ assertQuery("SELECT approx_distinct(CAST(orderdate AS TIMESTAMP)) FROM orders", "SELECT 2384");
+ assertQuery("SELECT approx_distinct(CAST(orderdate AS TIMESTAMP), 0.023) FROM orders", "SELECT 2384");
// test timestamp with time zone
assertQueryFails("SELECT approx_distinct(CAST(orderdate AS TIMESTAMP WITH TIME ZONE)) FROM orders",
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/AbstractOrcRecordReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/AbstractOrcRecordReader.java
index 048f9465424fc..c65688e7aa486 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/AbstractOrcRecordReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/AbstractOrcRecordReader.java
@@ -43,6 +43,7 @@
import java.io.Closeable;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -264,7 +265,8 @@ public AbstractOrcRecordReader(
this.dwrfEncryptionGroupMap,
runtimeStats,
fileIntrospector,
- fileModificationTime);
+ fileModificationTime,
+ hiveStorageTimeZone.toTimeZone().toZoneId());
this.streamReaders = requireNonNull(streamReaders, "streamReaders is null");
for (int columnId = 0; columnId < root.getFieldCount(); columnId++) {
@@ -671,9 +673,10 @@ private void advanceToNextStripe()
SharedBuffer sharedDecompressionBuffer = new SharedBuffer(currentStripeSystemMemoryContext.newOrcLocalMemoryContext("sharedDecompressionBuffer"));
Stripe stripe = stripeReader.readStripe(stripeInformation, currentStripeSystemMemoryContext, dwrfEncryptionInfo, sharedDecompressionBuffer);
if (stripe != null) {
+ ZoneId timezone = stripe.getTimezone();
for (StreamReader column : streamReaders) {
if (column != null) {
- column.startStripe(stripe);
+ column.startStripe(timezone, stripe);
}
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/DecodeTimestampOptions.java b/presto-orc/src/main/java/com/facebook/presto/orc/DecodeTimestampOptions.java
index 93004d6b4a720..ab6c197178c26 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/DecodeTimestampOptions.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/DecodeTimestampOptions.java
@@ -13,9 +13,8 @@
*/
package com.facebook.presto.orc;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
import java.util.concurrent.TimeUnit;
import static java.util.Objects.requireNonNull;
@@ -29,17 +28,17 @@ public class DecodeTimestampOptions
private final long nanosecondsPerUnit;
private final long baseSeconds;
- public DecodeTimestampOptions(DateTimeZone hiveStorageTimeZone, boolean enableMicroPrecision)
+ public DecodeTimestampOptions(ZoneId timezone, boolean enableMicroPrecision)
{
this.enableMicroPrecision = enableMicroPrecision;
TimeUnit timeUnit = enableMicroPrecision ? MICROSECONDS : MILLISECONDS;
- requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
+ requireNonNull(timezone, "timezone is null");
this.unitsPerSecond = timeUnit.convert(1, TimeUnit.SECONDS);
this.nanosecondsPerUnit = TimeUnit.NANOSECONDS.convert(1, timeUnit);
- this.baseSeconds = MILLISECONDS.toSeconds(new DateTime(2015, 1, 1, 0, 0, hiveStorageTimeZone).getMillis());
+ this.baseSeconds = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, timezone).toEpochSecond();
}
public boolean enableMicroPrecision()
@@ -58,7 +57,7 @@ public long getNanosPerUnit()
}
/**
- * @return Seconds since 01/01/2015 (see https://orc.apache.org/specification/ORCv1/) in hive storage timezone (see {@link DecodeTimestampOptions#DecodeTimestampOptions(DateTimeZone, boolean)} })
+ * @return Seconds since 01/01/2015 (see https://orc.apache.org/specification/ORCv1/) in hive storage timezone (see {@link DecodeTimestampOptions#DecodeTimestampOptions(ZoneId, boolean)} })
*/
public long getBaseSeconds()
{
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/OrcBatchRecordReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/OrcBatchRecordReader.java
index cbe13b7527d99..c7f6d5935cea8 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/OrcBatchRecordReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/OrcBatchRecordReader.java
@@ -80,7 +80,7 @@ public OrcBatchRecordReader(
// doesn't have a local buffer. All non-leaf level StreamReaders' (e.g. MapStreamReader, LongStreamReader,
// ListStreamReader and StructStreamReader) instance sizes were not counted, because calling setBytes() in
// their constructors is confusing.
- createStreamReaders(orcDataSource, types, hiveStorageTimeZone, options, includedColumns, systemMemoryUsage.newOrcAggregatedMemoryContext()),
+ createStreamReaders(orcDataSource, types, options, includedColumns, systemMemoryUsage.newOrcAggregatedMemoryContext()),
predicate,
numberOfRows,
fileStripes,
@@ -155,7 +155,6 @@ private void validateWritePageChecksum(int batchSize)
private static BatchStreamReader[] createStreamReaders(
OrcDataSource orcDataSource,
List types,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
Map includedColumns,
OrcAggregatedMemoryContext systemMemoryContext)
@@ -170,7 +169,7 @@ private static BatchStreamReader[] createStreamReaders(
Type type = includedColumns.get(columnId);
if (type != null) {
StreamDescriptor streamDescriptor = streamDescriptors.get(columnId);
- streamReaders[columnId] = BatchStreamReaders.createStreamReader(type, streamDescriptor, hiveStorageTimeZone, options, systemMemoryContext);
+ streamReaders[columnId] = BatchStreamReaders.createStreamReader(type, streamDescriptor, options, systemMemoryContext);
}
}
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/OrcSelectiveRecordReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/OrcSelectiveRecordReader.java
index 364d5f8f9933f..af67a3034f6af 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/OrcSelectiveRecordReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/OrcSelectiveRecordReader.java
@@ -195,7 +195,6 @@ public OrcSelectiveRecordReader(
createStreamReaders(
orcDataSource,
types,
- hiveStorageTimeZone,
options,
includedColumns,
outputColumns,
@@ -582,7 +581,6 @@ private static int[] orderStreamReaders(
private static SelectiveStreamReader[] createStreamReaders(
OrcDataSource orcDataSource,
List types,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
Map includedColumns,
List outputColumns,
@@ -614,7 +612,6 @@ private static SelectiveStreamReader[] createStreamReaders(
Optional.ofNullable(filters.get(columnId)).orElse(ImmutableMap.of()),
outputRequired ? Optional.of(includedColumns.get(columnId)) : Optional.empty(),
Optional.ofNullable(requiredSubfields.get(columnId)).orElse(ImmutableList.of()),
- hiveStorageTimeZone,
options,
systemMemoryContext,
false);
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/OrcWriteValidation.java b/presto-orc/src/main/java/com/facebook/presto/orc/OrcWriteValidation.java
index d11edb867132f..fd877a6016a7f 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/OrcWriteValidation.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/OrcWriteValidation.java
@@ -54,6 +54,7 @@
import io.airlift.slice.XxHash64;
import org.openjdk.jol.info.ClassLayout;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@@ -83,6 +84,7 @@
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP_MICROSECONDS;
import static com.facebook.presto.common.type.TinyintType.TINYINT;
import static com.facebook.presto.common.type.VarbinaryType.VARBINARY;
+import static com.facebook.presto.orc.OrcEncoding.DWRF;
import static com.facebook.presto.orc.OrcWriteValidation.OrcWriteValidationMode.BOTH;
import static com.facebook.presto.orc.OrcWriteValidation.OrcWriteValidationMode.DETAILED;
import static com.facebook.presto.orc.OrcWriteValidation.OrcWriteValidationMode.HASHED;
@@ -124,6 +126,9 @@ public enum OrcWriteValidationMode
// keeps all flat map value nodes
private final Set flattenedValueNodes;
+ private final OrcEncoding orcEncoding;
+ private final ZoneId timezone;
+
// all values passed into this constructor are collected by the writer
private OrcWriteValidation(
List version,
@@ -137,7 +142,9 @@ private OrcWriteValidation(
List fileStatistics,
int stringStatisticsLimitInBytes,
Set flattenedNodes,
- List orcTypes)
+ List orcTypes,
+ OrcEncoding orcEncoding,
+ ZoneId timezone)
{
this.version = version;
this.compression = compression;
@@ -152,6 +159,8 @@ private OrcWriteValidation(
this.flattenedKeyToMapNodes = getFlattenedKeyToMapNodes(flattenedNodes, orcTypes);
this.flattenedValueNodes = getFlattenedValueNodes(flattenedNodes, orcTypes);
this.flattenedMapToValueNodes = getFlattenedMapToValueNodes(flattenedNodes, orcTypes);
+ this.orcEncoding = orcEncoding;
+ this.timezone = timezone;
}
public List getVersion()
@@ -182,16 +191,42 @@ public Map getMetadata()
public void validateMetadata(OrcDataSourceId orcDataSourceId, Map actualMetadata)
throws OrcCorruptionException
{
- // Filter out metadata value statically added by the DWRF writer
- Map filteredMetadata = actualMetadata.entrySet().stream()
- .filter(entry -> !STATIC_METADATA.containsKey(entry.getKey()))
- .collect(toImmutableMap(Entry::getKey, Entry::getValue));
+ if (isDwrf()) {
+ // Filter out metadata value statically added by the DWRF writer
+ actualMetadata = actualMetadata.entrySet().stream()
+ .filter(entry -> !STATIC_METADATA.containsKey(entry.getKey()))
+ .collect(toImmutableMap(Entry::getKey, Entry::getValue));
+ }
- if (!metadata.equals(filteredMetadata)) {
+ if (!metadata.equals(actualMetadata)) {
throw new OrcCorruptionException(orcDataSourceId, "Unexpected metadata");
}
}
+ public OrcEncoding getOrcEncoding()
+ {
+ return orcEncoding;
+ }
+
+ public boolean isDwrf()
+ {
+ return orcEncoding == DWRF;
+ }
+
+ public ZoneId getTimezone()
+ {
+ return timezone;
+ }
+
+ public void validateTimeZone(OrcDataSourceId orcDataSourceId, ZoneId actualTimezone)
+ throws OrcCorruptionException
+ {
+ // DWRF does not store the writer timezone
+ if (!isDwrf() && !timezone.equals(actualTimezone)) {
+ throw new OrcCorruptionException(orcDataSourceId, "Unexpected timezone");
+ }
+ }
+
public WriteChecksum getChecksum()
{
return checksum;
@@ -209,7 +244,7 @@ public void validateStripeStatistics(OrcDataSourceId orcDataSourceId, List flattenedNodes;
private List orcTypes;
+ private final OrcEncoding orcEncoding;
+ private ZoneId timezone;
- public OrcWriteValidationBuilder(OrcWriteValidationMode validationMode, List types)
+ public OrcWriteValidationBuilder(OrcWriteValidationMode validationMode, List types, OrcEncoding orcEncoding)
{
this.validationMode = validationMode;
this.checksum = new WriteChecksumBuilder(types);
+ this.orcEncoding = orcEncoding;
}
public long getRetainedSize()
@@ -1065,6 +1105,11 @@ public void setOrcTypes(List orcTypes)
this.orcTypes = orcTypes;
}
+ public void setTimezone(ZoneId timezone)
+ {
+ this.timezone = timezone;
+ }
+
public OrcWriteValidation build()
{
return new OrcWriteValidation(
@@ -1079,7 +1124,9 @@ public OrcWriteValidation build()
fileStatistics,
stringStatisticsLimitInBytes,
flattenedNodes,
- orcTypes);
+ orcTypes,
+ orcEncoding,
+ timezone);
}
}
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/OrcWriter.java b/presto-orc/src/main/java/com/facebook/presto/orc/OrcWriter.java
index 94e28d55b46df..e835ffc479cfd 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/OrcWriter.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/OrcWriter.java
@@ -62,6 +62,7 @@
import java.io.Closeable;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -205,7 +206,7 @@ public OrcWriter(
OrcWriteValidationMode validationMode,
WriterStats stats)
{
- this.validationBuilder = validate ? new OrcWriteValidation.OrcWriteValidationBuilder(validationMode, types).setStringStatisticsLimitInBytes(toIntExact(options.getMaxStringStatisticsLimit().toBytes())) : null;
+ this.validationBuilder = validate ? new OrcWriteValidation.OrcWriteValidationBuilder(validationMode, types, orcEncoding).setStringStatisticsLimitInBytes(toIntExact(options.getMaxStringStatisticsLimit().toBytes())) : null;
this.dataSink = requireNonNull(dataSink, "dataSink is null");
this.types = ImmutableList.copyOf(requireNonNull(types, "types is null"));
@@ -241,6 +242,7 @@ public OrcWriter(
recordValidation(validation -> validation.setCompression(compressionKind));
recordValidation(validation -> validation.setFlattenedNodes(flattenedNodes));
recordValidation(validation -> validation.setOrcTypes(orcTypes));
+ recordValidation(validation -> validation.setTimezone(hiveStorageTimeZone.toTimeZone().toZoneId()));
requireNonNull(options, "options is null");
this.flushPolicy = requireNonNull(options.getFlushPolicy(), "flushPolicy is null");
@@ -630,7 +632,8 @@ private List bufferStripeData(long stripeStartOffset, FlushReason fl
.collect(toImmutableMap(Entry::getKey, Entry::getValue));
List encryptedGroups = createEncryptedGroups(encryptedStreams, encryptedColumnEncodings);
- StripeFooter stripeFooter = new StripeFooter(unencryptedStreams, unencryptedColumnEncodings, encryptedGroups);
+ Optional timezone = Optional.of(hiveStorageTimeZone.toTimeZone().toZoneId());
+ StripeFooter stripeFooter = new StripeFooter(unencryptedStreams, unencryptedColumnEncodings, encryptedGroups, timezone);
Slice footer = metadataWriter.writeStripeFooter(stripeFooter);
DataOutput footerDataOutput = createDataOutput(footer);
dwrfStripeCacheWriter.ifPresent(stripeCacheWriter -> stripeCacheWriter.addStripeFooter(createDataOutput(footer)));
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/Stripe.java b/presto-orc/src/main/java/com/facebook/presto/orc/Stripe.java
index fab11aa5d7ed1..4a7ee7249550f 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/Stripe.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/Stripe.java
@@ -18,6 +18,7 @@
import com.facebook.presto.orc.stream.InputStreamSources;
import com.google.common.collect.ImmutableList;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
@@ -31,14 +32,16 @@ public class Stripe
private final List rowGroups;
private final InputStreamSources dictionaryStreamSources;
private final LongDictionaryProvider longDictionaryProvider;
+ private final ZoneId timezone;
- public Stripe(long rowCount, Map columnEncodings, List rowGroups, InputStreamSources dictionaryStreamSources)
+ public Stripe(long rowCount, Map columnEncodings, List rowGroups, InputStreamSources dictionaryStreamSources, ZoneId timezone)
{
this.rowCount = rowCount;
this.columnEncodings = requireNonNull(columnEncodings, "columnEncodings is null");
this.rowGroups = ImmutableList.copyOf(requireNonNull(rowGroups, "rowGroups is null"));
this.dictionaryStreamSources = requireNonNull(dictionaryStreamSources, "dictionaryStreamSources is null");
this.longDictionaryProvider = new LongDictionaryProvider(this.dictionaryStreamSources);
+ this.timezone = requireNonNull(timezone, "timezone is null");
}
public long getRowCount()
@@ -66,6 +69,11 @@ public LongDictionaryProvider getLongDictionaryProvider()
return longDictionaryProvider;
}
+ public ZoneId getTimezone()
+ {
+ return timezone;
+ }
+
@Override
public String toString()
{
@@ -75,6 +83,7 @@ public String toString()
.add("rowGroups", rowGroups)
.add("dictionaryStreams", dictionaryStreamSources)
.add("longDictionaryProvider", longDictionaryProvider)
+ .add("timezone", timezone)
.toString();
}
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/StripeReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/StripeReader.java
index 9ff325d2a8c5c..a99aaa76d742f 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/StripeReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/StripeReader.java
@@ -50,6 +50,7 @@
import java.io.IOException;
import java.io.InputStream;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@@ -100,6 +101,7 @@ public class StripeReader
private final Multimap dwrfEncryptionGroupColumns;
private final RuntimeStats runtimeStats;
private final Optional fileIntrospector;
+ private final ZoneId defaultTimezone;
public StripeReader(
OrcDataSource orcDataSource,
@@ -116,7 +118,8 @@ public StripeReader(
Map dwrfEncryptionGroupMap,
RuntimeStats runtimeStats,
Optional fileIntrospector,
- long fileModificationTime)
+ long fileModificationTime,
+ ZoneId defaultTimezone)
{
this.orcDataSource = requireNonNull(orcDataSource, "orcDataSource is null");
this.decompressor = requireNonNull(decompressor, "decompressor is null");
@@ -133,6 +136,7 @@ public StripeReader(
this.runtimeStats = requireNonNull(runtimeStats, "runtimeStats is null");
this.fileIntrospector = requireNonNull(fileIntrospector, "fileIntrospector is null");
this.fileModificationTime = fileModificationTime;
+ this.defaultTimezone = requireNonNull(defaultTimezone, "defaultTimezone is null");
}
private Multimap invertEncryptionGroupMap(Map dwrfEncryptionGroupMap)
@@ -162,6 +166,11 @@ public Stripe readStripe(
StripeFooter stripeFooter = readStripeFooter(stripeId, stripe, systemMemoryUsage);
fileIntrospector.ifPresent(introspector -> introspector.onStripeFooter(stripe, stripeFooter));
+ writeValidation.ifPresent(orcWriteValidation ->
+ orcWriteValidation.validateTimeZone(orcDataSource.getId(), stripeFooter.getTimezone().orElse(defaultTimezone)));
+
+ ZoneId timezone = stripeFooter.getTimezone().orElse(defaultTimezone);
+
// get streams for selected columns
List> allStreams = new ArrayList<>();
allStreams.add(stripeFooter.getStreams());
@@ -227,7 +236,7 @@ public Stripe readStripe(
selectedRowGroups,
columnEncodings);
- return new Stripe(stripe.getNumberOfRows(), columnEncodings, rowGroups, dictionaryStreamSources);
+ return new Stripe(stripe.getNumberOfRows(), columnEncodings, rowGroups, dictionaryStreamSources, timezone);
}
catch (InvalidCheckpointException e) {
// The ORC file contains a corrupt checkpoint stream
@@ -287,7 +296,12 @@ public Stripe readStripe(
}
RowGroup rowGroup = new RowGroup(0, 0, stripe.getNumberOfRows(), totalBytes, new InputStreamSources(builder.build()));
- return new Stripe(stripe.getNumberOfRows(), columnEncodings, ImmutableList.of(rowGroup), dictionaryStreamSources);
+ return new Stripe(
+ stripe.getNumberOfRows(),
+ columnEncodings,
+ ImmutableList.of(rowGroup),
+ dictionaryStreamSources,
+ timezone);
}
private StripeEncryptionGroup getStripeEncryptionGroup(DwrfDataEncryptor decryptor, Slice encryptedGroup, Collection columns, OrcAggregatedMemoryContext systemMemoryUsage)
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/DwrfMetadataReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/DwrfMetadataReader.java
index bf07ef9cc4fff..63116bcdfb787 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/DwrfMetadataReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/DwrfMetadataReader.java
@@ -53,6 +53,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.lang.management.ManagementFactory;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -61,6 +62,7 @@
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.SortedMap;
+import java.util.TimeZone;
import java.util.stream.IntStream;
import static com.facebook.presto.orc.NoopOrcAggregatedMemoryContext.NOOP_ORC_AGGREGATED_MEMORY_CONTEXT;
@@ -78,6 +80,7 @@
import static com.facebook.presto.orc.metadata.PostScript.HiveWriterVersion.ORIGINAL;
import static com.facebook.presto.orc.metadata.statistics.ColumnStatistics.createColumnStatistics;
import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.base.Strings.emptyToNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.lang.Math.toIntExact;
import static java.util.Objects.requireNonNull;
@@ -331,7 +334,9 @@ public StripeFooter readStripeFooter(OrcDataSourceId orcDataSourceId, List TimeZone.getTimeZone(ZoneId.of(timezone)).toZoneId()));
}
private static Stream toStream(OrcDataSourceId orcDataSourceId, DwrfProto.Stream stream)
@@ -541,7 +546,7 @@ static StringStatistics toStringStatistics(HiveWriterVersion hiveWriterVersion,
Slice minimum = stringStatistics.hasMinimum() ? minStringTruncateToValidRange(byteStringToSlice(stringStatistics.getMinimumBytes()), hiveWriterVersion) : null;
long sum = stringStatistics.hasSum() ? stringStatistics.getSum() : 0;
- return new StringStatistics(minimum, maximum, sum);
+ return new StringStatistics(minimum, maximum, false, false, sum);
}
private static BinaryStatistics toBinaryStatistics(DwrfProto.BinaryStatistics binaryStatistics)
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/DwrfMetadataWriter.java b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/DwrfMetadataWriter.java
index af925dc44ff3b..f2249316a9d89 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/DwrfMetadataWriter.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/DwrfMetadataWriter.java
@@ -36,10 +36,12 @@
import java.io.IOException;
import java.io.OutputStream;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
+import java.util.TimeZone;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
@@ -296,6 +298,8 @@ private static UserMetadataItem toUserMetadata(Entry entry)
public int writeStripeFooter(SliceOutput output, StripeFooter footer)
throws IOException
{
+ ZoneId timezone = footer.getTimezone().orElseThrow(() -> new IllegalArgumentException("Timezone not set"));
+
DwrfProto.StripeFooter footerProtobuf = DwrfProto.StripeFooter.newBuilder()
.addAllStreams(footer.getStreams().stream()
.map(DwrfMetadataWriter::toStream)
@@ -304,6 +308,7 @@ public int writeStripeFooter(SliceOutput output, StripeFooter footer)
.addAllEncryptedGroups(footer.getStripeEncryptionGroups().stream()
.map(group -> ByteString.copyFrom(group.getBytes()))
.collect(toImmutableList()))
+ .setWriterTimezone(TimeZone.getTimeZone(timezone).getID())
.build();
return writeProtobufObject(output, footerProtobuf);
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/OrcMetadataReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/OrcMetadataReader.java
index 36ff1bb93650f..1d5a4c92eaed8 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/OrcMetadataReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/OrcMetadataReader.java
@@ -50,11 +50,13 @@
import java.io.InputStream;
import java.lang.management.ManagementFactory;
import java.math.BigDecimal;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
+import java.util.TimeZone;
import java.util.stream.IntStream;
import static com.facebook.airlift.units.DataSize.Unit.GIGABYTE;
@@ -69,6 +71,7 @@
import static com.facebook.presto.orc.metadata.statistics.ColumnStatistics.createColumnStatistics;
import static com.facebook.presto.orc.metadata.statistics.ShortDecimalStatisticsBuilder.SHORT_DECIMAL_VALUE_BYTES;
import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.base.Strings.emptyToNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static io.airlift.slice.SliceUtf8.lengthOfCodePoint;
@@ -196,7 +199,12 @@ public StripeFooter readStripeFooter(OrcDataSourceId orcDataSourceId, List TimeZone.getTimeZone(ZoneId.of(timezone)).toZoneId()));
}
private static Stream toStream(OrcProto.Stream stream)
@@ -339,10 +347,16 @@ static StringStatistics toStringStatistics(HiveWriterVersion hiveWriterVersion,
return null;
}
- Slice maximum = stringStatistics.hasMaximum() ? maxStringTruncateToValidRange(byteStringToSlice(stringStatistics.getMaximumBytes()), hiveWriterVersion) : null;
- Slice minimum = stringStatistics.hasMinimum() ? minStringTruncateToValidRange(byteStringToSlice(stringStatistics.getMinimumBytes()), hiveWriterVersion) : null;
+ Slice maximum = stringStatistics.hasUpperBound() ?
+ maxStringTruncateToValidRange(byteStringToSlice(stringStatistics.getUpperBoundBytes()), hiveWriterVersion) :
+ stringStatistics.hasMaximum() ?
+ maxStringTruncateToValidRange(byteStringToSlice(stringStatistics.getMaximumBytes()), hiveWriterVersion) : null;
+ Slice minimum = stringStatistics.hasLowerBound() ?
+ minStringTruncateToValidRange(byteStringToSlice(stringStatistics.getLowerBoundBytes()), hiveWriterVersion) :
+ stringStatistics.hasMinimum() ?
+ minStringTruncateToValidRange(byteStringToSlice(stringStatistics.getMinimumBytes()), hiveWriterVersion) : null;
long sum = stringStatistics.hasSum() ? stringStatistics.getSum() : 0;
- return new StringStatistics(minimum, maximum, sum);
+ return new StringStatistics(minimum, maximum, stringStatistics.hasLowerBound(), stringStatistics.hasUpperBound(), sum);
}
private static DecimalStatistics toDecimalStatistics(OrcProto.DecimalStatistics decimalStatistics)
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/OrcMetadataWriter.java b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/OrcMetadataWriter.java
index 0465f0a2e8992..c8341f00418cc 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/OrcMetadataWriter.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/OrcMetadataWriter.java
@@ -32,10 +32,12 @@
import java.io.IOException;
import java.io.OutputStream;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
+import java.util.TimeZone;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
@@ -294,6 +296,8 @@ private static UserMetadataItem toUserMetadata(Entry entry)
public int writeStripeFooter(SliceOutput output, StripeFooter footer)
throws IOException
{
+ ZoneId timezone = footer.getTimezone().orElseThrow(() -> new IllegalArgumentException("Timezone not set"));
+
OrcProto.StripeFooter footerProtobuf = OrcProto.StripeFooter.newBuilder()
.addAllStreams(footer.getStreams().stream()
.map(OrcMetadataWriter::toStream)
@@ -302,6 +306,7 @@ public int writeStripeFooter(SliceOutput output, StripeFooter footer)
.sorted(Entry.comparingByKey())
.map(entry -> toColumnEncoding(entry.getValue()))
.collect(toList()))
+ .setWriterTimezone(TimeZone.getTimeZone(timezone).getID())
.build();
return writeProtobufObject(output, footerProtobuf);
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/StripeFooter.java b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/StripeFooter.java
index c5a1e1fe90df1..1130cea5a1095 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/StripeFooter.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/StripeFooter.java
@@ -17,8 +17,10 @@
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import static java.util.Objects.requireNonNull;
@@ -29,12 +31,18 @@ public class StripeFooter
// encrypted StripeEncryptionGroups
private final List stripeEncryptionGroups;
+ private final Optional timezone;
- public StripeFooter(List streams, Map columnEncodings, List stripeEncryptionGroups)
+ public StripeFooter(
+ List streams,
+ Map columnEncodings,
+ List stripeEncryptionGroups,
+ Optional timezone)
{
this.streams = ImmutableList.copyOf(requireNonNull(streams, "streams is null"));
this.columnEncodings = ImmutableMap.copyOf(requireNonNull(columnEncodings, "columnEncodings is null"));
this.stripeEncryptionGroups = ImmutableList.copyOf(requireNonNull(stripeEncryptionGroups, "stripeEncryptionGroups is null"));
+ this.timezone = requireNonNull(timezone, "timezone is null");
}
public Map getColumnEncodings()
@@ -51,4 +59,9 @@ public List getStripeEncryptionGroups()
{
return stripeEncryptionGroups;
}
+
+ public Optional getTimezone()
+ {
+ return timezone;
+ }
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/statistics/StringStatistics.java b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/statistics/StringStatistics.java
index 616a2579a9450..21bbd3b13af07 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/statistics/StringStatistics.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/statistics/StringStatistics.java
@@ -35,13 +35,19 @@ public class StringStatistics
private final Slice minimum;
@Nullable
private final Slice maximum;
+ @Nullable
+ private final boolean lowerBoundSet;
+ @Nullable
+ private final boolean upperBoundSet;
private final long sum;
- public StringStatistics(@Nullable Slice minimum, @Nullable Slice maximum, long sum)
+ public StringStatistics(@Nullable Slice minimum, @Nullable Slice maximum, boolean lowerBoundSet, boolean upperBoundSet, long sum)
{
checkArgument(minimum == null || maximum == null || minimum.compareTo(maximum) <= 0, "minimum is not less than maximum");
this.minimum = minimum;
this.maximum = maximum;
+ this.lowerBoundSet = lowerBoundSet;
+ this.upperBoundSet = upperBoundSet;
this.sum = sum;
}
@@ -57,6 +63,16 @@ public Slice getMax()
return maximum;
}
+ public boolean isLowerBoundSet()
+ {
+ return lowerBoundSet;
+ }
+
+ public boolean isUpperBoundSet()
+ {
+ return upperBoundSet;
+ }
+
public long getSum()
{
return sum;
@@ -80,13 +96,15 @@ public boolean equals(Object o)
StringStatistics that = (StringStatistics) o;
return Objects.equals(minimum, that.minimum) &&
Objects.equals(maximum, that.maximum) &&
+ Objects.equals(lowerBoundSet, that.lowerBoundSet) &&
+ Objects.equals(upperBoundSet, that.upperBoundSet) &&
Objects.equals(sum, that.sum);
}
@Override
public int hashCode()
{
- return Objects.hash(minimum, maximum, sum);
+ return Objects.hash(minimum, maximum, lowerBoundSet, upperBoundSet, sum);
}
@Override
@@ -95,6 +113,8 @@ public String toString()
return toStringHelper(this)
.add("min", minimum == null ? "" : minimum.toStringUtf8())
.add("max", maximum == null ? "" : maximum.toStringUtf8())
+ .add("lowerBound", lowerBoundSet)
+ .add("upperBound", upperBoundSet)
.add("sum", sum)
.toString();
}
@@ -104,6 +124,8 @@ public void addHash(StatisticsHasher hasher)
{
hasher.putOptionalSlice(minimum)
.putOptionalSlice(maximum)
+ .putBoolean(lowerBoundSet)
+ .putBoolean(upperBoundSet)
.putLong(sum);
}
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/statistics/StringStatisticsBuilder.java b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/statistics/StringStatisticsBuilder.java
index 83007e640fbda..07acaf630d09e 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/metadata/statistics/StringStatisticsBuilder.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/metadata/statistics/StringStatisticsBuilder.java
@@ -119,7 +119,7 @@ private Optional buildStringStatistics()
}
minimum = dropStringMinMaxIfNecessary(minimum);
maximum = dropStringMinMaxIfNecessary(maximum);
- return Optional.of(new StringStatistics(minimum, maximum, sum));
+ return Optional.of(new StringStatistics(minimum, maximum, false, false, sum));
}
@Override
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/AbstractDecimalSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/AbstractDecimalSelectiveStreamReader.java
index 1eb18cbfda5fb..9daf341b8b7ea 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/AbstractDecimalSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/AbstractDecimalSelectiveStreamReader.java
@@ -30,6 +30,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.DATA;
@@ -99,7 +100,7 @@ public AbstractDecimalSelectiveStreamReader(
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getDecimalMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/BatchStreamReaders.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/BatchStreamReaders.java
index 047005e1ae690..cf5a820b6cdd3 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/BatchStreamReaders.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/BatchStreamReaders.java
@@ -18,7 +18,6 @@
import com.facebook.presto.orc.OrcCorruptionException;
import com.facebook.presto.orc.OrcRecordReaderOptions;
import com.facebook.presto.orc.StreamDescriptor;
-import org.joda.time.DateTimeZone;
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP_MICROSECONDS;
@@ -28,7 +27,7 @@ private BatchStreamReaders()
{
}
- public static BatchStreamReader createStreamReader(Type type, StreamDescriptor streamDescriptor, DateTimeZone hiveStorageTimeZone, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
+ public static BatchStreamReader createStreamReader(Type type, StreamDescriptor streamDescriptor, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
throws OrcCorruptionException
{
switch (streamDescriptor.getOrcTypeKind()) {
@@ -53,13 +52,13 @@ public static BatchStreamReader createStreamReader(Type type, StreamDescriptor s
case TIMESTAMP:
case TIMESTAMP_MICROSECONDS:
boolean enableMicroPrecision = type == TIMESTAMP_MICROSECONDS;
- return new TimestampBatchStreamReader(type, streamDescriptor, hiveStorageTimeZone, enableMicroPrecision);
+ return new TimestampBatchStreamReader(type, streamDescriptor, enableMicroPrecision);
case LIST:
- return new ListBatchStreamReader(type, streamDescriptor, hiveStorageTimeZone, options, systemMemoryContext);
+ return new ListBatchStreamReader(type, streamDescriptor, options, systemMemoryContext);
case STRUCT:
- return new StructBatchStreamReader(type, streamDescriptor, hiveStorageTimeZone, options, systemMemoryContext);
+ return new StructBatchStreamReader(type, streamDescriptor, options, systemMemoryContext);
case MAP:
- return new MapBatchStreamReader(type, streamDescriptor, hiveStorageTimeZone, options, systemMemoryContext);
+ return new MapBatchStreamReader(type, streamDescriptor, options, systemMemoryContext);
case DECIMAL:
return new DecimalBatchStreamReader(type, streamDescriptor);
case UNION:
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/BooleanBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/BooleanBatchStreamReader.java
index 6b3e0f730b179..bed552c7a98ce 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/BooleanBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/BooleanBatchStreamReader.java
@@ -29,6 +29,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.common.type.BooleanType.BOOLEAN;
@@ -177,7 +178,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getBooleanMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/BooleanSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/BooleanSelectiveStreamReader.java
index 21688f5db2b09..0724795ba689f 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/BooleanSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/BooleanSelectiveStreamReader.java
@@ -29,6 +29,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.common.array.Arrays.ensureCapacity;
@@ -96,7 +97,7 @@ public BooleanSelectiveStreamReader(
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getBooleanMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/ByteBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/ByteBatchStreamReader.java
index db74427ea2a94..c8bb7675ccafc 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/ByteBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/ByteBatchStreamReader.java
@@ -30,6 +30,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.common.type.TinyintType.TINYINT;
@@ -172,7 +173,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getByteMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/ByteSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/ByteSelectiveStreamReader.java
index 47b8c8e621d2a..f0517e7bb032b 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/ByteSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/ByteSelectiveStreamReader.java
@@ -30,6 +30,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.common.array.Arrays.ensureCapacity;
@@ -96,7 +97,7 @@ public ByteSelectiveStreamReader(
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getByteMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/DecimalBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/DecimalBatchStreamReader.java
index a6a68e67cd227..00ee34e33e5ab 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/DecimalBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/DecimalBatchStreamReader.java
@@ -33,6 +33,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import static com.facebook.presto.common.type.UnscaledDecimal128Arithmetic.rescale;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.DATA;
@@ -196,7 +197,7 @@ private void seekToOffset()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
decimalStreamSource = getDecimalMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/DoubleBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/DoubleBatchStreamReader.java
index c9506616e212e..b16302e93944c 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/DoubleBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/DoubleBatchStreamReader.java
@@ -29,6 +29,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import static com.facebook.presto.common.type.DoubleType.DOUBLE;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.DATA;
@@ -138,7 +139,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getDoubleMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/DoubleSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/DoubleSelectiveStreamReader.java
index 96f8f6cfd8a10..8f157915ce155 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/DoubleSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/DoubleSelectiveStreamReader.java
@@ -30,6 +30,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.common.array.Arrays.ensureCapacity;
@@ -99,7 +100,7 @@ public DoubleSelectiveStreamReader(
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getDoubleMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/FloatBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/FloatBatchStreamReader.java
index 85bfc7849a593..7eac5ccf16b4e 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/FloatBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/FloatBatchStreamReader.java
@@ -29,6 +29,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import static com.facebook.presto.common.type.RealType.REAL;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.DATA;
@@ -139,7 +140,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getFloatMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/FloatSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/FloatSelectiveStreamReader.java
index fb2db102e61d0..50a13d96b1bf0 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/FloatSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/FloatSelectiveStreamReader.java
@@ -29,6 +29,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.common.array.Arrays.ensureCapacity;
@@ -89,7 +90,7 @@ public FloatSelectiveStreamReader(
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getFloatMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/ListBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/ListBatchStreamReader.java
index 6b0ed8beaf477..3b5314505acf9 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/ListBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/ListBatchStreamReader.java
@@ -28,11 +28,11 @@
import com.facebook.presto.orc.stream.LongInputStream;
import com.google.common.io.Closer;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.LENGTH;
@@ -70,14 +70,14 @@ public class ListBatchStreamReader
private boolean rowGroupOpen;
- public ListBatchStreamReader(Type type, StreamDescriptor streamDescriptor, DateTimeZone hiveStorageTimeZone, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
+ public ListBatchStreamReader(Type type, StreamDescriptor streamDescriptor, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
throws OrcCorruptionException
{
requireNonNull(type, "type is null");
verifyStreamType(streamDescriptor, type, ArrayType.class::isInstance);
elementType = ((ArrayType) type).getElementType();
this.streamDescriptor = requireNonNull(streamDescriptor, "stream is null");
- this.elementStreamReader = createStreamReader(elementType, streamDescriptor.getNestedStreams().get(0), hiveStorageTimeZone, options, systemMemoryContext);
+ this.elementStreamReader = createStreamReader(elementType, streamDescriptor.getNestedStreams().get(0), options, systemMemoryContext);
}
@Override
@@ -162,7 +162,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
presentStreamSource = getBooleanMissingStreamSource();
@@ -176,7 +176,7 @@ public void startStripe(Stripe stripe)
rowGroupOpen = false;
- elementStreamReader.startStripe(stripe);
+ elementStreamReader.startStripe(timezone, stripe);
}
@Override
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/ListSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/ListSelectiveStreamReader.java
index fc88391d5d1b5..19d55a4089956 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/ListSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/ListSelectiveStreamReader.java
@@ -36,10 +36,10 @@
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -119,7 +119,6 @@ public ListSelectiveStreamReader(
ListFilter listFilter,
int subfieldLevel, // 0 - top level
Optional outputType,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
OrcAggregatedMemoryContext systemMemoryContext,
boolean isLowMemory)
@@ -207,7 +206,6 @@ else if (!filters.isEmpty()) {
Optional.ofNullable(this.listFilter),
elementOutputType,
elementSubfields,
- hiveStorageTimeZone,
options,
systemMemoryContext,
isLowMemory);
@@ -706,7 +704,7 @@ public void close()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
presentStreamSource = getBooleanMissingStreamSource();
@@ -721,7 +719,7 @@ public void startStripe(Stripe stripe)
rowGroupOpen = false;
if (elementStreamReader != null) {
- elementStreamReader.startStripe(stripe);
+ elementStreamReader.startStripe(timezone, stripe);
}
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongBatchStreamReader.java
index f6bce66652780..795f936666632 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongBatchStreamReader.java
@@ -26,6 +26,7 @@
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.time.ZoneId;
import static com.facebook.presto.orc.metadata.ColumnEncoding.ColumnEncodingKind.DICTIONARY;
import static com.facebook.presto.orc.metadata.ColumnEncoding.ColumnEncodingKind.DIRECT;
@@ -68,7 +69,7 @@ public Block readBlock()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
ColumnEncodingKind kind = stripe.getColumnEncodings().get(streamDescriptor.getStreamId())
@@ -77,14 +78,14 @@ public void startStripe(Stripe stripe)
if (kind == DIRECT || kind == DIRECT_V2 || kind == DWRF_DIRECT) {
currentReader = directReader;
if (dictionaryReader != null && resetAllReaders) {
- dictionaryReader.startStripe(stripe);
+ dictionaryReader.startStripe(timezone, stripe);
System.setProperty("RESET_LONG_BATCH_READER", "RESET_LONG_BATCH_READER");
}
}
else if (kind == DICTIONARY) {
currentReader = dictionaryReader;
if (directReader != null && resetAllReaders) {
- directReader.startStripe(stripe);
+ directReader.startStripe(timezone, stripe);
System.setProperty("RESET_LONG_BATCH_READER", "RESET_LONG_BATCH_READER");
}
}
@@ -92,7 +93,7 @@ else if (kind == DICTIONARY) {
throw new IllegalArgumentException("Unsupported encoding " + kind);
}
- currentReader.startStripe(stripe);
+ currentReader.startStripe(timezone, stripe);
}
@Override
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDictionaryBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDictionaryBatchStreamReader.java
index 4405c095d129d..9494eaea4d4de 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDictionaryBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDictionaryBatchStreamReader.java
@@ -34,6 +34,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.DATA;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.IN_DICTIONARY;
@@ -202,7 +203,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
dictionaryProvider = stripe.getLongDictionaryProvider();
dictionarySize = stripe.getColumnEncodings().get(streamDescriptor.getStreamId())
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDictionarySelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDictionarySelectiveStreamReader.java
index 0427659cdb7a7..dfe294e715a3f 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDictionarySelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDictionarySelectiveStreamReader.java
@@ -27,6 +27,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Arrays;
import static com.facebook.presto.common.array.Arrays.ensureCapacity;
@@ -301,7 +302,7 @@ public BlockLease getBlockView(int[] positions, int positionCount)
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
dictionaryProvider = stripe.getLongDictionaryProvider();
dictionarySize = stripe.getColumnEncodings().get(context.getStreamDescriptor().getStreamId())
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDirectBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDirectBatchStreamReader.java
index a5194b7049264..27ab228544954 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDirectBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDirectBatchStreamReader.java
@@ -36,6 +36,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import java.util.function.LongFunction;
@@ -273,7 +274,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getLongMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDirectSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDirectSelectiveStreamReader.java
index 316536b488206..b5e604155f6d5 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDirectSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongDirectSelectiveStreamReader.java
@@ -27,6 +27,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import static com.facebook.presto.common.block.ClosingBlockLease.newLease;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.DATA;
@@ -319,7 +320,7 @@ public BlockLease getBlockView(int[] positions, int positionCount)
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
dataStreamSource = getLongMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongSelectiveStreamReader.java
index 2d7953ef4ac0c..ce6c502e631a1 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/LongSelectiveStreamReader.java
@@ -28,6 +28,7 @@
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.google.common.base.MoreObjects.toStringHelper;
@@ -58,7 +59,7 @@ public LongSelectiveStreamReader(
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
StreamDescriptor streamDescriptor = context.getStreamDescriptor();
@@ -92,7 +93,7 @@ public void startStripe(Stripe stripe)
throw new IllegalArgumentException("Unsupported encoding " + kind);
}
- currentReader.startStripe(stripe);
+ currentReader.startStripe(timezone, stripe);
}
@Override
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapBatchStreamReader.java
index 96c6e204c6be4..dca46901b86d7 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapBatchStreamReader.java
@@ -23,11 +23,11 @@
import com.facebook.presto.orc.metadata.ColumnEncoding.ColumnEncodingKind;
import com.facebook.presto.orc.stream.InputStreamSources;
import com.google.common.io.Closer;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.time.ZoneId;
import static com.facebook.presto.orc.metadata.ColumnEncoding.ColumnEncodingKind.DIRECT;
import static com.facebook.presto.orc.metadata.ColumnEncoding.ColumnEncodingKind.DIRECT_V2;
@@ -46,12 +46,12 @@ public class MapBatchStreamReader
private final MapFlatBatchStreamReader flatReader;
private BatchStreamReader currentReader;
- public MapBatchStreamReader(Type type, StreamDescriptor streamDescriptor, DateTimeZone hiveStorageTimeZone, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
+ public MapBatchStreamReader(Type type, StreamDescriptor streamDescriptor, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
throws OrcCorruptionException
{
this.streamDescriptor = requireNonNull(streamDescriptor, "stream is null");
- this.directReader = new MapDirectBatchStreamReader(type, streamDescriptor, hiveStorageTimeZone, options, systemMemoryContext);
- this.flatReader = new MapFlatBatchStreamReader(type, streamDescriptor, hiveStorageTimeZone, options, systemMemoryContext);
+ this.directReader = new MapDirectBatchStreamReader(type, streamDescriptor, options, systemMemoryContext);
+ this.flatReader = new MapFlatBatchStreamReader(type, streamDescriptor, options, systemMemoryContext);
}
@Override
@@ -68,7 +68,7 @@ public Block readBlock()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
ColumnEncodingKind kind = stripe.getColumnEncodings().get(streamDescriptor.getStreamId())
@@ -84,7 +84,7 @@ else if (kind == DWRF_MAP_FLAT) {
throw new IllegalArgumentException("Unsupported encoding " + kind);
}
- currentReader.startStripe(stripe);
+ currentReader.startStripe(timezone, stripe);
}
@Override
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapDirectBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapDirectBatchStreamReader.java
index 196e5013f2176..8110400cf4b64 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapDirectBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapDirectBatchStreamReader.java
@@ -28,11 +28,11 @@
import com.google.common.io.Closer;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.LENGTH;
@@ -75,7 +75,6 @@ public class MapDirectBatchStreamReader
public MapDirectBatchStreamReader(
Type type,
StreamDescriptor streamDescriptor,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
OrcAggregatedMemoryContext systemMemoryContext)
throws OrcCorruptionException
@@ -85,8 +84,8 @@ public MapDirectBatchStreamReader(
verifyStreamType(streamDescriptor, type, MapType.class::isInstance);
this.type = (MapType) type;
this.streamDescriptor = requireNonNull(streamDescriptor, "stream is null");
- this.keyStreamReader = createStreamReader(this.type.getKeyType(), streamDescriptor.getNestedStreams().get(0), hiveStorageTimeZone, options, systemMemoryContext);
- this.valueStreamReader = createStreamReader(this.type.getValueType(), streamDescriptor.getNestedStreams().get(1), hiveStorageTimeZone, options, systemMemoryContext);
+ this.keyStreamReader = createStreamReader(this.type.getKeyType(), streamDescriptor.getNestedStreams().get(0), options, systemMemoryContext);
+ this.valueStreamReader = createStreamReader(this.type.getValueType(), streamDescriptor.getNestedStreams().get(1), options, systemMemoryContext);
}
@Override
@@ -230,7 +229,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
presentStreamSource = getBooleanMissingStreamSource();
@@ -244,8 +243,8 @@ public void startStripe(Stripe stripe)
rowGroupOpen = false;
- keyStreamReader.startStripe(stripe);
- valueStreamReader.startStripe(stripe);
+ keyStreamReader.startStripe(timezone, stripe);
+ valueStreamReader.startStripe(timezone, stripe);
}
@Override
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapDirectSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapDirectSelectiveStreamReader.java
index 101a3aeb7eabe..670a5a5690097 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapDirectSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapDirectSelectiveStreamReader.java
@@ -39,10 +39,10 @@
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -112,7 +112,6 @@ public MapDirectSelectiveStreamReader(
Map filters,
List requiredSubfields,
Optional outputType,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
OrcAggregatedMemoryContext systemMemoryContext,
boolean isLowMemory)
@@ -146,8 +145,8 @@ public MapDirectSelectiveStreamReader(
.collect(toImmutableList());
}
- this.keyReader = SelectiveStreamReaders.createStreamReader(nestedStreams.get(0), keyFilter, keyOutputType, ImmutableList.of(), hiveStorageTimeZone, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
- this.valueReader = SelectiveStreamReaders.createStreamReader(nestedStreams.get(1), ImmutableMap.of(), valueOutputType, elementRequiredSubfields, hiveStorageTimeZone, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
+ this.keyReader = SelectiveStreamReaders.createStreamReader(nestedStreams.get(0), keyFilter, keyOutputType, ImmutableList.of(), options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
+ this.valueReader = SelectiveStreamReaders.createStreamReader(nestedStreams.get(1), ImmutableMap.of(), valueOutputType, elementRequiredSubfields, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
}
else {
this.keyReader = null;
@@ -685,7 +684,7 @@ public void close()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
presentStreamSource = getBooleanMissingStreamSource();
@@ -700,8 +699,8 @@ public void startStripe(Stripe stripe)
rowGroupOpen = false;
if (outputRequired) {
- keyReader.startStripe(stripe);
- valueReader.startStripe(stripe);
+ keyReader.startStripe(timezone, stripe);
+ valueReader.startStripe(timezone, stripe);
}
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapFlatBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapFlatBatchStreamReader.java
index e467ec21fd568..3b637e27f474f 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapFlatBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapFlatBatchStreamReader.java
@@ -38,11 +38,11 @@
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -75,7 +75,6 @@ public class MapFlatBatchStreamReader
private final MapType type;
private final StreamDescriptor streamDescriptor;
- private final DateTimeZone hiveStorageTimeZone;
// This is the StreamDescriptor for the value stream with sequence ID 0, it is used to derive StreamDescriptors for the
// value streams with other sequence IDs
@@ -100,14 +99,13 @@ public class MapFlatBatchStreamReader
private OrcAggregatedMemoryContext systemMemoryContext;
private final OrcRecordReaderOptions options;
- public MapFlatBatchStreamReader(Type type, StreamDescriptor streamDescriptor, DateTimeZone hiveStorageTimeZone, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
+ public MapFlatBatchStreamReader(Type type, StreamDescriptor streamDescriptor, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
throws OrcCorruptionException
{
requireNonNull(type, "type is null");
verifyStreamType(streamDescriptor, type, MapType.class::isInstance);
this.type = (MapType) type;
this.streamDescriptor = requireNonNull(streamDescriptor, "stream is null");
- this.hiveStorageTimeZone = requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
this.systemMemoryContext = requireNonNull(systemMemoryContext, "systemMemoryContext is null");
this.keyOrcType = streamDescriptor.getNestedStreams().get(0).getOrcTypeKind();
this.baseValueStreamDescriptor = streamDescriptor.getNestedStreams().get(1);
@@ -235,7 +233,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
presentStreamSource = getBooleanMissingStreamSource();
@@ -258,8 +256,8 @@ public void startStripe(Stripe stripe)
StreamDescriptor valueStreamDescriptor = baseValueStreamDescriptor.duplicate(sequence);
valueStreamDescriptors.add(valueStreamDescriptor);
- BatchStreamReader valueStreamReader = BatchStreamReaders.createStreamReader(type.getValueType(), valueStreamDescriptor, hiveStorageTimeZone, options, systemMemoryContext);
- valueStreamReader.startStripe(stripe);
+ BatchStreamReader valueStreamReader = BatchStreamReaders.createStreamReader(type.getValueType(), valueStreamDescriptor, options, systemMemoryContext);
+ valueStreamReader.startStripe(timezone, stripe);
valueStreamReaders.add(valueStreamReader);
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapFlatSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapFlatSelectiveStreamReader.java
index ab2719e5ae555..15c5976819b99 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapFlatSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapFlatSelectiveStreamReader.java
@@ -48,10 +48,10 @@
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -87,7 +87,6 @@ public class MapFlatSelectiveStreamReader
// value streams with other sequence IDs
private final StreamDescriptor baseValueStreamDescriptor;
private final OrcTypeKind keyOrcTypeKind;
- private final DateTimeZone hiveStorageTimeZone;
private final boolean nullsAllowed;
private final boolean nonNullsAllowed;
@@ -139,7 +138,6 @@ public MapFlatSelectiveStreamReader(
Map filters,
List requiredSubfields,
Optional outputType,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
OrcAggregatedMemoryContext systemMemoryContext)
{
@@ -150,7 +148,6 @@ public MapFlatSelectiveStreamReader(
this.streamDescriptor = requireNonNull(streamDescriptor, "streamDescriptor is null");
this.keyOrcTypeKind = streamDescriptor.getNestedStreams().get(0).getOrcTypeKind();
this.baseValueStreamDescriptor = streamDescriptor.getNestedStreams().get(1);
- this.hiveStorageTimeZone = requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
this.systemMemoryContext = requireNonNull(systemMemoryContext, "systemMemoryContext is null");
this.localMemoryContext = systemMemoryContext.newOrcLocalMemoryContext(MapFlatSelectiveStreamReader.class.getSimpleName());
@@ -633,7 +630,7 @@ public void close()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
presentStreamSource = getBooleanMissingStreamSource();
@@ -675,11 +672,10 @@ public void startStripe(Stripe stripe)
ImmutableBiMap.of(),
Optional.ofNullable(outputType).map(MapType::getValueType),
ImmutableList.of(),
- hiveStorageTimeZone,
options,
systemMemoryContext.newOrcAggregatedMemoryContext(),
true);
- valueStreamReader.startStripe(stripe);
+ valueStreamReader.startStripe(timezone, stripe);
valueStreamReaders.add(valueStreamReader);
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapSelectiveStreamReader.java
index e547b566782b7..b3ad8a1af42f6 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/MapSelectiveStreamReader.java
@@ -26,10 +26,10 @@
import com.facebook.presto.orc.metadata.ColumnEncoding.ColumnEncodingKind;
import com.facebook.presto.orc.stream.InputStreamSources;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -58,15 +58,14 @@ public MapSelectiveStreamReader(
Map filters,
List requiredSubfields,
Optional outputType,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
OrcAggregatedMemoryContext systemMemoryContext,
boolean isLowMemory)
{
this.streamDescriptor = requireNonNull(streamDescriptor, "stream is null");
- directReader = new MapDirectSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, outputType, hiveStorageTimeZone, options, systemMemoryContext, isLowMemory);
+ directReader = new MapDirectSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, outputType, options, systemMemoryContext, isLowMemory);
if (streamDescriptor.getSequence() == DEFAULT_SEQUENCE_ID) {
- flatReader = new MapFlatSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, outputType, hiveStorageTimeZone, options, systemMemoryContext);
+ flatReader = new MapFlatSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, outputType, options, systemMemoryContext);
}
else {
// When sequence id is not DEFAULT_SEQUENCE_ID, this map is inside a flat map.
@@ -76,7 +75,7 @@ public MapSelectiveStreamReader(
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
ColumnEncodingKind kind = stripe.getColumnEncodings().get(streamDescriptor.getStreamId())
@@ -95,7 +94,7 @@ else if (kind == DWRF_MAP_FLAT) {
throw new IllegalArgumentException("Unsupported encoding " + kind);
}
- currentReader.startStripe(stripe);
+ currentReader.startStripe(timezone, stripe);
}
@Override
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SelectiveStreamReaders.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SelectiveStreamReaders.java
index 9433624570d0f..30dd313281f2a 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SelectiveStreamReaders.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SelectiveStreamReaders.java
@@ -38,7 +38,6 @@
import com.facebook.presto.orc.metadata.OrcType.OrcTypeKind;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
-import org.joda.time.DateTimeZone;
import java.util.List;
import java.util.Map;
@@ -60,7 +59,6 @@ public static SelectiveStreamReader createStreamReader(
Map filters,
Optional outputType,
List requiredSubfields,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
OrcAggregatedMemoryContext systemMemoryContext,
boolean isLowMemory)
@@ -109,20 +107,19 @@ public static SelectiveStreamReader createStreamReader(
return new TimestampSelectiveStreamReader(
streamDescriptor,
getOptionalOnlyFilter(type, filters),
- hiveStorageTimeZone,
outputType.isPresent(),
systemMemoryContext.newOrcLocalMemoryContext(SelectiveStreamReaders.class.getSimpleName()),
enableMicroPrecision);
}
case LIST:
verifyStreamType(streamDescriptor, outputType, ArrayType.class::isInstance);
- return new ListSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, null, 0, outputType, hiveStorageTimeZone, options, systemMemoryContext, isLowMemory);
+ return new ListSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, null, 0, outputType, options, systemMemoryContext, isLowMemory);
case STRUCT:
verifyStreamType(streamDescriptor, outputType, RowType.class::isInstance);
- return new StructSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, outputType, hiveStorageTimeZone, options, systemMemoryContext, isLowMemory);
+ return new StructSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, outputType, options, systemMemoryContext, isLowMemory);
case MAP:
verifyStreamType(streamDescriptor, outputType, MapType.class::isInstance);
- return new MapSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, outputType, hiveStorageTimeZone, options, systemMemoryContext, isLowMemory);
+ return new MapSelectiveStreamReader(streamDescriptor, filters, requiredSubfields, outputType, options, systemMemoryContext, isLowMemory);
case DECIMAL: {
verifyStreamType(streamDescriptor, outputType, DecimalType.class::isInstance);
if (streamDescriptor.getOrcType().getPrecision().get() <= MAX_SHORT_PRECISION) {
@@ -161,7 +158,6 @@ public static SelectiveStreamReader createNestedStreamReader(
Optional parentFilter,
Optional outputType,
List requiredSubfields,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
OrcAggregatedMemoryContext systemMemoryContext,
boolean isLowMemory)
@@ -193,16 +189,16 @@ public static SelectiveStreamReader createNestedStreamReader(
// No need to read the elements when output is not required and the filter is a simple IS [NOT] NULL
return null;
}
- return createStreamReader(streamDescriptor, elementFilters, outputType, requiredSubfields, hiveStorageTimeZone, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
+ return createStreamReader(streamDescriptor, elementFilters, outputType, requiredSubfields, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
case LIST:
Optional childFilter = parentFilter.map(HierarchicalFilter::getChild).map(ListFilter.class::cast);
- return new ListSelectiveStreamReader(streamDescriptor, ImmutableMap.of(), requiredSubfields, childFilter.orElse(null), level, outputType, hiveStorageTimeZone, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
+ return new ListSelectiveStreamReader(streamDescriptor, ImmutableMap.of(), requiredSubfields, childFilter.orElse(null), level, outputType, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
case STRUCT:
checkArgument(!parentFilter.isPresent(), "Filters on nested structs are not supported yet");
- return new StructSelectiveStreamReader(streamDescriptor, ImmutableMap.of(), requiredSubfields, outputType, hiveStorageTimeZone, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
+ return new StructSelectiveStreamReader(streamDescriptor, ImmutableMap.of(), requiredSubfields, outputType, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
case MAP:
checkArgument(!parentFilter.isPresent(), "Filters on nested maps are not supported yet");
- return new MapSelectiveStreamReader(streamDescriptor, ImmutableMap.of(), requiredSubfields, outputType, hiveStorageTimeZone, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
+ return new MapSelectiveStreamReader(streamDescriptor, ImmutableMap.of(), requiredSubfields, outputType, options, systemMemoryContext.newOrcAggregatedMemoryContext(), isLowMemory);
case UNION:
default:
throw new IllegalArgumentException("Unsupported type: " + streamDescriptor.getOrcTypeKind());
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceBatchStreamReader.java
index 7951adb307b9a..8386707c77964 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceBatchStreamReader.java
@@ -30,6 +30,7 @@
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.time.ZoneId;
import static com.facebook.presto.common.type.Chars.byteCountWithoutTrailingSpace;
import static com.facebook.presto.common.type.Chars.isCharType;
@@ -81,7 +82,7 @@ public void prepareNextRead(int batchSize)
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
ColumnEncodingKind columnEncodingKind = stripe.getColumnEncodings().get(streamDescriptor.getStreamId())
@@ -90,14 +91,14 @@ public void startStripe(Stripe stripe)
if (columnEncodingKind == DIRECT || columnEncodingKind == DIRECT_V2 || columnEncodingKind == DWRF_DIRECT) {
currentReader = directReader;
if (dictionaryReader != null && resetAllReaders) {
- dictionaryReader.startStripe(stripe);
+ dictionaryReader.startStripe(timezone, stripe);
System.setProperty("RESET_SLICE_BATCH_READER", "RESET_SLICE_BATCH_READER");
}
}
else if (columnEncodingKind == DICTIONARY || columnEncodingKind == DICTIONARY_V2) {
currentReader = dictionaryReader;
if (directReader != null && resetAllReaders) {
- directReader.startStripe(stripe);
+ directReader.startStripe(timezone, stripe);
System.setProperty("RESET_SLICE_BATCH_READER", "RESET_SLICE_BATCH_READER");
}
}
@@ -105,7 +106,7 @@ else if (columnEncodingKind == DICTIONARY || columnEncodingKind == DICTIONARY_V2
throw new IllegalArgumentException("Unsupported encoding " + columnEncodingKind);
}
- currentReader.startStripe(stripe);
+ currentReader.startStripe(timezone, stripe);
}
@Override
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionaryBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionaryBatchStreamReader.java
index 1587df1d08929..42a0d72e0bc0e 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionaryBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionaryBatchStreamReader.java
@@ -31,6 +31,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Arrays;
import java.util.Optional;
@@ -334,7 +335,7 @@ private static void readDictionary(
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
InputStreamSources dictionaryStreamSources = stripe.getDictionaryStreamSources();
stripeDictionaryDataStreamSource = dictionaryStreamSources.getInputStreamSource(streamDescriptor, DICTIONARY_DATA, ByteArrayInputStream.class);
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionarySelectiveReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionarySelectiveReader.java
index ff96fe90f7d65..1607d446b9683 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionarySelectiveReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDictionarySelectiveReader.java
@@ -37,6 +37,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Arrays;
import java.util.Optional;
@@ -601,7 +602,7 @@ private static void readDictionary(
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
InputStreamSources dictionaryStreamSources = stripe.getDictionaryStreamSources();
stripeDictionaryDataStreamSource = dictionaryStreamSources.getInputStreamSource(context.getStreamDescriptor(), DICTIONARY_DATA, ByteArrayInputStream.class);
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDirectBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDirectBatchStreamReader.java
index bce6275a0e17c..df11da7d50094 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDirectBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDirectBatchStreamReader.java
@@ -31,6 +31,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.DATA;
@@ -246,7 +247,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
lengthStreamSource = getLongMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDirectSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDirectSelectiveStreamReader.java
index 00a3556d3e35a..f243305d58b18 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDirectSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceDirectSelectiveStreamReader.java
@@ -36,6 +36,7 @@
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.common.array.Arrays.ExpansionFactor.SMALL;
@@ -622,7 +623,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
presentStreamSource = getBooleanMissingStreamSource();
lengthStreamSource = getLongMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceSelectiveStreamReader.java
index ed7076ad1b634..33671cc052505 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/SliceSelectiveStreamReader.java
@@ -32,6 +32,7 @@
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.google.common.base.MoreObjects.toStringHelper;
@@ -74,7 +75,7 @@ public static int computeTruncatedLength(Slice slice, int offset, int length, in
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
ColumnEncoding.ColumnEncodingKind kind = stripe.getColumnEncodings().get(context.getStreamDescriptor().getStreamId())
@@ -108,7 +109,7 @@ public void startStripe(Stripe stripe)
throw new IllegalArgumentException("Unsupported encoding " + kind);
}
- currentReader.startStripe(stripe);
+ currentReader.startStripe(timezone, stripe);
}
@Override
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/StreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/StreamReader.java
index 2d7416e4e1dd0..f34c1ec2eeb7c 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/StreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/StreamReader.java
@@ -17,10 +17,11 @@
import com.facebook.presto.orc.stream.InputStreamSources;
import java.io.IOException;
+import java.time.ZoneId;
public interface StreamReader
{
- void startStripe(Stripe stripe)
+ void startStripe(ZoneId timezone, Stripe stripe)
throws IOException;
void startRowGroup(InputStreamSources dataStreamSources)
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/StructBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/StructBatchStreamReader.java
index 555b24cac7ef0..2fee16a260c6a 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/StructBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/StructBatchStreamReader.java
@@ -32,11 +32,11 @@
import com.google.common.collect.Maps;
import com.google.common.io.Closer;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
import java.io.UncheckedIOException;
+import java.time.ZoneId;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
@@ -71,7 +71,7 @@ public class StructBatchStreamReader
private boolean rowGroupOpen;
- StructBatchStreamReader(Type type, StreamDescriptor streamDescriptor, DateTimeZone hiveStorageTimeZone, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
+ StructBatchStreamReader(Type type, StreamDescriptor streamDescriptor, OrcRecordReaderOptions options, OrcAggregatedMemoryContext systemMemoryContext)
throws OrcCorruptionException
{
requireNonNull(type, "type is null");
@@ -91,7 +91,7 @@ public class StructBatchStreamReader
StreamDescriptor fieldStream = nestedStreams.get(fieldName);
if (fieldStream != null) {
- structFields.put(fieldName, createStreamReader(field.getType(), fieldStream, hiveStorageTimeZone, options, systemMemoryContext));
+ structFields.put(fieldName, createStreamReader(field.getType(), fieldStream, options, systemMemoryContext));
}
}
this.fieldNames = fieldNames.build();
@@ -168,7 +168,7 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
presentStreamSource = getBooleanMissingStreamSource();
@@ -181,7 +181,7 @@ public void startStripe(Stripe stripe)
rowGroupOpen = false;
for (BatchStreamReader structField : structFields.values()) {
- structField.startStripe(stripe);
+ structField.startStripe(timezone, stripe);
}
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/StructSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/StructSelectiveStreamReader.java
index 43c536cf84410..8b53b4d90e478 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/StructSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/StructSelectiveStreamReader.java
@@ -36,10 +36,10 @@
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@@ -106,7 +106,6 @@ public StructSelectiveStreamReader(
Map filters,
List requiredSubfields,
Optional outputType,
- DateTimeZone hiveStorageTimeZone,
OrcRecordReaderOptions options,
OrcAggregatedMemoryContext systemMemoryContext,
boolean isLowMemory)
@@ -184,7 +183,6 @@ else if (outputRequired || !fieldsWithFilters.isEmpty()) {
nestedFilters,
fieldOutputType,
nestedRequiredSubfields,
- hiveStorageTimeZone,
options,
systemMemoryContext.newOrcAggregatedMemoryContext(),
isLowMemory);
@@ -624,7 +622,7 @@ public void close()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
throws IOException
{
presentStreamSource = getBooleanMissingStreamSource();
@@ -637,7 +635,7 @@ public void startStripe(Stripe stripe)
rowGroupOpen = false;
for (SelectiveStreamReader reader : nestedReaders.values()) {
- reader.startStripe(stripe);
+ reader.startStripe(timezone, stripe);
}
}
@@ -769,7 +767,7 @@ public void close()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
}
@@ -839,7 +837,7 @@ public void close()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
}
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/TimestampBatchStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/TimestampBatchStreamReader.java
index 4792574d1ff73..0636a900e49c8 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/TimestampBatchStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/TimestampBatchStreamReader.java
@@ -27,10 +27,10 @@
import com.facebook.presto.orc.stream.InputStreamSources;
import com.facebook.presto.orc.stream.LongInputStream;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP;
@@ -67,12 +67,13 @@ public class TimestampBatchStreamReader
private LongInputStream nanosStream;
private boolean rowGroupOpen;
- private final DecodeTimestampOptions decodeTimestampOptions;
+ private final boolean enableMicroPrecision;
+ private DecodeTimestampOptions decodeTimestampOptions;
- public TimestampBatchStreamReader(Type type, StreamDescriptor streamDescriptor, DateTimeZone hiveStorageTimeZone, boolean enableMicroPrecision)
+ public TimestampBatchStreamReader(Type type, StreamDescriptor streamDescriptor, boolean enableMicroPrecision)
throws OrcCorruptionException
{
- this.decodeTimestampOptions = new DecodeTimestampOptions(hiveStorageTimeZone, enableMicroPrecision);
+ this.enableMicroPrecision = enableMicroPrecision;
requireNonNull(type, "type is null");
verifyStreamType(streamDescriptor, type, TimestampType.class::isInstance);
this.streamDescriptor = requireNonNull(streamDescriptor, "stream is null");
@@ -190,8 +191,10 @@ private void openRowGroup()
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
+ decodeTimestampOptions = new DecodeTimestampOptions(timezone, enableMicroPrecision);
+
presentStreamSource = getBooleanMissingStreamSource();
secondsStreamSource = getLongMissingStreamSource();
nanosStreamSource = getLongMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/reader/TimestampSelectiveStreamReader.java b/presto-orc/src/main/java/com/facebook/presto/orc/reader/TimestampSelectiveStreamReader.java
index 9f8aaa31c41d3..295aad8ee8074 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/reader/TimestampSelectiveStreamReader.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/reader/TimestampSelectiveStreamReader.java
@@ -28,11 +28,10 @@
import com.facebook.presto.orc.stream.InputStreamSources;
import com.facebook.presto.orc.stream.LongInputStream;
import jakarta.annotation.Nullable;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZoneId;
import java.util.Optional;
import static com.facebook.presto.common.array.Arrays.ensureCapacity;
@@ -61,9 +60,9 @@ public class TimestampSelectiveStreamReader
private final boolean nullsAllowed;
private final boolean outputRequired;
private final OrcLocalMemoryContext systemMemoryContext;
- private final long baseTimestampInSeconds;
private final boolean nonDeterministicFilter;
- private final DecodeTimestampOptions decodeTimestampOptions;
+ private final boolean enableMicroPrecision;
+ private DecodeTimestampOptions decodeTimestampOptions;
private InputStreamSource presentStreamSource = getBooleanMissingStreamSource();
private InputStreamSource secondsStreamSource = getLongMissingStreamSource();
@@ -89,12 +88,11 @@ public class TimestampSelectiveStreamReader
public TimestampSelectiveStreamReader(
StreamDescriptor streamDescriptor,
Optional filter,
- DateTimeZone hiveStorageTimeZone,
boolean outputRequired,
OrcLocalMemoryContext systemMemoryContext,
boolean enableMicroPrecision)
{
- this.decodeTimestampOptions = new DecodeTimestampOptions(hiveStorageTimeZone, enableMicroPrecision);
+ this.enableMicroPrecision = enableMicroPrecision;
requireNonNull(filter, "filter is null");
checkArgument(filter.isPresent() || outputRequired, "filter must be present if outputRequired is false");
this.streamDescriptor = requireNonNull(streamDescriptor, "streamDescriptor is null");
@@ -103,12 +101,13 @@ public TimestampSelectiveStreamReader(
this.systemMemoryContext = requireNonNull(systemMemoryContext, "systemMemoryContext is null");
this.nonDeterministicFilter = this.filter != null && !this.filter.isDeterministic();
this.nullsAllowed = this.filter == null || nonDeterministicFilter || this.filter.testNull();
- this.baseTimestampInSeconds = new DateTime(2015, 1, 1, 0, 0, requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null")).getMillis() / 1000;
}
@Override
- public void startStripe(Stripe stripe)
+ public void startStripe(ZoneId timezone, Stripe stripe)
{
+ decodeTimestampOptions = new DecodeTimestampOptions(timezone, enableMicroPrecision);
+
presentStreamSource = getBooleanMissingStreamSource();
secondsStreamSource = getLongMissingStreamSource();
nanosStreamSource = getLongMissingStreamSource();
diff --git a/presto-orc/src/main/java/com/facebook/presto/orc/writer/TimestampColumnWriter.java b/presto-orc/src/main/java/com/facebook/presto/orc/writer/TimestampColumnWriter.java
index d6c1d8761f49a..0e0d2b95e195a 100644
--- a/presto-orc/src/main/java/com/facebook/presto/orc/writer/TimestampColumnWriter.java
+++ b/presto-orc/src/main/java/com/facebook/presto/orc/writer/TimestampColumnWriter.java
@@ -34,11 +34,11 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.Slice;
-import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import java.io.IOException;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -132,7 +132,7 @@ else if (type == TIMESTAMP_MICROSECONDS) {
}
this.presentStream = new PresentOutputStream(columnWriterOptions, dwrfEncryptor);
this.metadataWriter = new CompressedMetadataWriter(metadataWriter, columnWriterOptions, dwrfEncryptor);
- this.baseTimestampInSeconds = new DateTime(2015, 1, 1, 0, 0, requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null")).getMillis() / MILLIS_PER_SECOND;
+ this.baseTimestampInSeconds = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, hiveStorageTimeZone.toTimeZone().toZoneId()).toEpochSecond();
}
@Override
diff --git a/presto-orc/src/test/java/com/facebook/presto/orc/AbstractTestOrcReader.java b/presto-orc/src/test/java/com/facebook/presto/orc/AbstractTestOrcReader.java
index 5391ffb9df6da..2b3d544f02966 100644
--- a/presto-orc/src/test/java/com/facebook/presto/orc/AbstractTestOrcReader.java
+++ b/presto-orc/src/test/java/com/facebook/presto/orc/AbstractTestOrcReader.java
@@ -45,8 +45,6 @@
import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.io.Writable;
-import org.joda.time.DateTimeZone;
-import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.IOException;
@@ -75,7 +73,6 @@
import static com.facebook.presto.orc.OrcEncoding.ORC;
import static com.facebook.presto.orc.OrcReader.INITIAL_BATCH_SIZE;
import static com.facebook.presto.orc.OrcTester.Format.ORC_12;
-import static com.facebook.presto.orc.OrcTester.HIVE_STORAGE_TIME_ZONE;
import static com.facebook.presto.orc.OrcTester.createCustomOrcRecordReader;
import static com.facebook.presto.orc.OrcTester.createOrcRecordWriter;
import static com.facebook.presto.orc.OrcTester.createSettableStructObjectInspector;
@@ -114,12 +111,6 @@ public AbstractTestOrcReader(OrcTester tester)
this.tester = tester;
}
- @BeforeClass
- public void setUp()
- {
- assertEquals(DateTimeZone.getDefault(), HIVE_STORAGE_TIME_ZONE);
- }
-
@Test
public void testBooleanSequence()
throws Exception
diff --git a/presto-orc/src/test/java/com/facebook/presto/orc/OrcTester.java b/presto-orc/src/test/java/com/facebook/presto/orc/OrcTester.java
index 10faad504f0dc..f5302a8fa289a 100644
--- a/presto-orc/src/test/java/com/facebook/presto/orc/OrcTester.java
+++ b/presto-orc/src/test/java/com/facebook/presto/orc/OrcTester.java
@@ -64,8 +64,10 @@
import io.airlift.slice.Slices;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.OrcFile.ReaderOptions;
@@ -75,11 +77,11 @@
import org.apache.hadoop.hive.ql.io.orc.OrcUtil;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.hadoop.hive.serde2.Serializer;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -107,8 +109,6 @@
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.math.BigInteger;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZonedDateTime;
@@ -209,7 +209,7 @@
public class OrcTester
{
public static final DataSize MAX_BLOCK_SIZE = new DataSize(1, Unit.MEGABYTE);
- public static final DateTimeZone HIVE_STORAGE_TIME_ZONE = DateTimeZone.forID("America/Bahia_Banderas");
+ public static final DateTimeZone HIVE_STORAGE_TIME_ZONE = DateTimeZone.UTC;
private static final FunctionAndTypeManager FUNCTION_AND_TYPE_MANAGER = createTestFunctionAndTypeManager();
private static final List PRIME_NUMBERS = ImmutableList.of(5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97);
@@ -1292,7 +1292,12 @@ private static boolean testValue(Type type, Object value, TupleDomainFilter filt
}
if (type == TIMESTAMP) {
- return filter.testLong(((SqlTimestamp) value).getMillisUtc());
+ if (SESSION.getSqlFunctionProperties().isLegacyTimestamp()) {
+ return filter.testLong(((SqlTimestamp) value).getMillisUtc());
+ }
+ else {
+ return filter.testLong(((SqlTimestamp) value).getMillis());
+ }
}
if (type instanceof DecimalType) {
@@ -1888,11 +1893,23 @@ else if (DATE.equals(type)) {
type.writeLong(blockBuilder, days);
}
else if (TIMESTAMP.equals(type)) {
- long millis = ((SqlTimestamp) value).getMillisUtc();
+ long millis;
+ if (SESSION.getSqlFunctionProperties().isLegacyTimestamp()) {
+ millis = ((SqlTimestamp) value).getMillisUtc();
+ }
+ else {
+ millis = ((SqlTimestamp) value).getMillis();
+ }
type.writeLong(blockBuilder, millis);
}
else if (TIMESTAMP_MICROSECONDS.equals(type)) {
- long micros = ((SqlTimestamp) value).getMicrosUtc();
+ long micros;
+ if (SESSION.getSqlFunctionProperties().isLegacyTimestamp()) {
+ micros = ((SqlTimestamp) value).getMicrosUtc();
+ }
+ else {
+ micros = ((SqlTimestamp) value).getMicros();
+ }
type.writeLong(blockBuilder, micros);
}
else {
@@ -2051,8 +2068,8 @@ else if (actualValue instanceof ByteWritable) {
else if (actualValue instanceof BytesWritable) {
actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
}
- else if (actualValue instanceof DateWritable) {
- actualValue = new SqlDate(((DateWritable) actualValue).getDays());
+ else if (actualValue instanceof DateWritableV2) {
+ actualValue = new SqlDate(((DateWritableV2) actualValue).getDays());
}
else if (actualValue instanceof DoubleWritable) {
actualValue = ((DoubleWritable) actualValue).get();
@@ -2082,9 +2099,9 @@ else if (actualValue instanceof HiveDecimalWritable) {
else if (actualValue instanceof Text) {
actualValue = actualValue.toString();
}
- else if (actualValue instanceof TimestampWritable) {
- TimestampWritable timestamp = (TimestampWritable) actualValue;
- actualValue = sqlTimestampOf((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), SESSION);
+ else if (actualValue instanceof TimestampWritableV2) {
+ TimestampWritableV2 timestamp = (TimestampWritableV2) actualValue;
+ actualValue = sqlTimestampOf(timestamp.getTimestamp().toEpochMilli(), SESSION);
}
else if (actualValue instanceof OrcStruct) {
List