diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/ArrowStreamReaderCursor.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/ArrowStreamReaderCursor.java index ac143ae1..71942000 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/ArrowStreamReaderCursor.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/ArrowStreamReaderCursor.java @@ -17,16 +17,27 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import lombok.val; +import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.vector.FieldVector; import org.apache.arrow.vector.VectorSchemaRoot; import org.apache.arrow.vector.ipc.ArrowStreamReader; +/** + * Row cursor over an {@link ArrowStreamReader} that drives the {@link DataCloudResultSet}. + * + *
<p>
The cursor owns the supplied {@link BufferAllocator} alongside the reader: closing the + * cursor closes the reader (which releases ArrowBuf accounting) and then the allocator (which + * returns its budget). This is the single place that guarantees root-allocator hygiene for the + * driver; callers of {@link DataCloudResultSet#of} hand ownership over and do not close the + * allocator themselves. + */ @Slf4j class ArrowStreamReaderCursor implements AutoCloseable { private static final int INIT_ROW_NUMBER = -1; private final ArrowStreamReader reader; + private final BufferAllocator allocator; private final ZoneId sessionZone; @lombok.Getter @@ -34,8 +45,9 @@ class ArrowStreamReaderCursor implements AutoCloseable { private final AtomicInteger currentIndex = new AtomicInteger(INIT_ROW_NUMBER); - ArrowStreamReaderCursor(ArrowStreamReader reader, ZoneId sessionZone) { + ArrowStreamReaderCursor(ArrowStreamReader reader, BufferAllocator allocator, ZoneId sessionZone) { this.reader = reader; + this.allocator = allocator; this.sessionZone = sessionZone; } @@ -91,6 +103,13 @@ public boolean next() { @SneakyThrows @Override public void close() { - reader.close(); + // try-with-resources closes in reverse declaration order: reader first (releases the + // buffers accounted against the allocator so its closing budget check passes), then + // allocator. If both throw, Java attaches the second as suppressed onto the first + // instead of dropping the reader exception via the standard try/finally semantics. 
+ try (BufferAllocator a = allocator; + ArrowStreamReader r = reader) { + // resource cleanup happens at exit + } } } diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudConnection.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudConnection.java index fdc32e92..56ca5a5f 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudConnection.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudConnection.java @@ -48,6 +48,7 @@ import java.sql.Statement; import java.sql.Struct; import java.time.Duration; +import java.time.ZoneId; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -220,7 +221,7 @@ public DataCloudResultSet getRowBasedResultSet(String queryId, long offset, long QueryResultArrowStream.OUTPUT_FORMAT); val arrowStream = SQLExceptionQueryResultIterator.createSqlExceptionArrowStreamReader( iterator, connectionProperties.isIncludeCustomerDetailInReason(), queryId, null); - return StreamingResultSet.of(arrowStream, queryId); + return DataCloudResultSet.of(arrowStream, queryId, ZoneId.systemDefault()); } catch (StatusRuntimeException ex) { throw QueryExceptionHandler.createException( connectionProperties.isIncludeCustomerDetailInReason(), null, queryId, ex); @@ -263,7 +264,7 @@ public DataCloudResultSet getChunkBasedResultSet(String queryId, long chunkId, l QueryResultArrowStream.OUTPUT_FORMAT); val arrowStream = SQLExceptionQueryResultIterator.createSqlExceptionArrowStreamReader( iterator, connectionProperties.isIncludeCustomerDetailInReason(), queryId, null); - return StreamingResultSet.of(arrowStream, queryId); + return DataCloudResultSet.of(arrowStream, queryId, ZoneId.systemDefault()); } catch (StatusRuntimeException ex) { throw QueryExceptionHandler.createException( connectionProperties.isIncludeCustomerDetailInReason(), null, queryId, ex); diff --git 
a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudDatabaseMetadata.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudDatabaseMetadata.java index 1dcfb65e..9adc8600 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudDatabaseMetadata.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudDatabaseMetadata.java @@ -13,7 +13,7 @@ import com.google.common.collect.ImmutableList; import com.salesforce.datacloud.jdbc.config.DriverVersion; -import com.salesforce.datacloud.jdbc.core.metadata.DataCloudResultSetMetaData; +import com.salesforce.datacloud.jdbc.core.metadata.MetadataResultSets; import com.salesforce.datacloud.jdbc.core.types.HyperTypes; import com.salesforce.datacloud.jdbc.util.JdbcURL; import com.salesforce.datacloud.jdbc.util.ThrowingJdbcSupplier; @@ -706,39 +706,39 @@ public ResultSet getColumns(String catalog, String schemaPattern, String tableNa @Override public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern) throws SQLException { - return DataCloudMetadataResultSet.empty(); + return MetadataResultSets.emptyNoColumns(); } @Override public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { - return DataCloudMetadataResultSet.empty(); + return MetadataResultSets.emptyNoColumns(); } @Override public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable) throws SQLException { - return DataCloudMetadataResultSet.empty(); + return MetadataResultSets.emptyNoColumns(); } @Override public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException { - return DataCloudMetadataResultSet.empty(); + return MetadataResultSets.emptyNoColumns(); } @Override public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException { - return 
DataCloudMetadataResultSet.empty(); + return MetadataResultSets.emptyNoColumns(); } @Override public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException { - return DataCloudMetadataResultSet.empty(); + return MetadataResultSets.emptyNoColumns(); } @Override public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException { - return DataCloudMetadataResultSet.empty(); + return MetadataResultSets.emptyNoColumns(); } @Override @@ -750,19 +750,18 @@ public ResultSet getCrossReference( String foreignSchema, String foreignTable) throws SQLException { - return DataCloudMetadataResultSet.empty(); + return MetadataResultSets.emptyNoColumns(); } @Override public ResultSet getTypeInfo() throws SQLException { - return DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(MetadataSchemas.TYPE_INFO), HyperTypes.typeInfoRows()); + return MetadataResultSets.ofRawRows(MetadataSchemas.TYPE_INFO, HyperTypes.typeInfoRows()); } @Override public ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate) throws SQLException { - return DataCloudMetadataResultSet.empty(); + return MetadataResultSets.emptyNoColumns(); } @Override diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudMetadataResultSet.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudMetadataResultSet.java deleted file mode 100644 index 1c6ee1d9..00000000 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudMetadataResultSet.java +++ /dev/null @@ -1,164 +0,0 @@ -/** - * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the - * Apache 2.0 license. 
See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt - */ -package com.salesforce.datacloud.jdbc.core; - -import com.salesforce.datacloud.jdbc.core.metadata.DataCloudResultSetMetaData; -import com.salesforce.datacloud.jdbc.core.resultset.ColumnAccessor; -import com.salesforce.datacloud.jdbc.core.resultset.SimpleResultSet; -import com.salesforce.datacloud.jdbc.protocol.data.ColumnMetadata; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.Collections; -import java.util.List; -import java.util.OptionalLong; - -/** - * Custom ResultSet implementation for metadata queries - */ -public class DataCloudMetadataResultSet extends SimpleResultSet { - - private final List data; - private int currentRow = -1; - private boolean closed = false; - - private DataCloudMetadataResultSet( - DataCloudResultSetMetaData metadata, - ColumnAccessor[] accessors, - List data) { - super(metadata, accessors, false); - this.data = data; - } - - public static DataCloudMetadataResultSet empty() throws SQLException { - return of(new DataCloudResultSetMetaData(Collections.emptyList()), Collections.emptyList()); - } - - public static DataCloudMetadataResultSet of(DataCloudResultSetMetaData metadata, List data) - throws SQLException { - @SuppressWarnings("unchecked") - ColumnAccessor[] accessors = new ColumnAccessor[metadata.getColumnCount()]; - for (int i = 0; i < metadata.getColumnCount(); i++) { - final int columnIndex = i; - accessors[i] = createAccessor(metadata.getColumn(i + 1), columnIndex); - } - - return new DataCloudMetadataResultSet(metadata, accessors, data); - } - - /** - * Creates a ColumnAccessor for a specific column. 
- */ - private static ColumnAccessor createAccessor(ColumnMetadata column, int columnIndex) { - return new ColumnAccessor() { - @Override - public String getString(DataCloudMetadataResultSet resultSet) throws SQLException { - Object value = getValue(resultSet, columnIndex); - if (value == null) { - return null; - } - return value.toString(); - } - - @Override - public Boolean getBoolean(DataCloudMetadataResultSet resultSet) throws SQLException { - Object value = getValue(resultSet, columnIndex); - if (value == null) { - return null; - } - if (value instanceof Boolean) { - return (Boolean) value; - } - return false; - } - - @Override - public OptionalLong getAnyInteger(DataCloudMetadataResultSet resultSet) throws SQLException { - Object value = getValue(resultSet, columnIndex); - if (value == null) { - return OptionalLong.empty(); - } - if (value instanceof Number) { - return OptionalLong.of(((Number) value).longValue()); - } - throw new SQLException( - "Cannot convert to integer: " + value.getClass().getName()); - } - - /** - * Helper method to get the value for the current row and column. - */ - private Object getValue(DataCloudMetadataResultSet resultSet, int columnIndex) throws SQLException { - if (resultSet.closed) { - throw new SQLException("ResultSet is closed"); - } - if (resultSet.currentRow < 0) { - throw new SQLException("No current row. 
Call next() first."); - } - if (resultSet.currentRow >= resultSet.data.size()) { - throw new SQLException("Row index out of bounds"); - } - - Object row = resultSet.data.get(resultSet.currentRow); - if (!(row instanceof List)) { - throw new SQLException("Data row is not a List"); - } - - @SuppressWarnings("unchecked") - List rowList = (List) row; - - if (columnIndex >= rowList.size()) { - throw new SQLException("Column index " + columnIndex + " out of bounds for row"); - } - - return rowList.get(columnIndex); - } - }; - } - - @Override - public boolean next() throws SQLException { - if (closed) { - throw new SQLException("ResultSet is closed"); - } - currentRow++; - return currentRow < data.size(); - } - - @Override - public void close() throws SQLException { - closed = true; - } - - @Override - public boolean isClosed() throws SQLException { - return closed; - } - - @Override - public int getRow() throws SQLException { - if (closed) { - throw new SQLException("ResultSet is closed"); - } - if (currentRow >= 0 && currentRow < data.size()) { - return currentRow + 1; - } - return 0; - } - - @Override - public Statement getStatement() throws SQLException { - return null; - } - - @Override - public T unwrap(Class iface) throws SQLException { - return null; - } - - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - return false; - } -} diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSet.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSet.java index 28cd51a1..d8b2efb8 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSet.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSet.java @@ -4,8 +4,515 @@ */ package com.salesforce.datacloud.jdbc.core; +import com.salesforce.datacloud.jdbc.core.accessor.QueryJDBCAccessor; +import com.salesforce.datacloud.jdbc.core.metadata.DataCloudResultSetMetaData; +import 
com.salesforce.datacloud.jdbc.core.resultset.ForwardOnlyResultSet; +import com.salesforce.datacloud.jdbc.core.resultset.ReadOnlyResultSet; +import com.salesforce.datacloud.jdbc.core.resultset.ResultSetWithPositionalGetters; +import com.salesforce.datacloud.jdbc.protocol.QueryResultArrowStream; +import com.salesforce.datacloud.jdbc.protocol.data.ArrowToHyperTypeMapper; +import com.salesforce.datacloud.jdbc.util.ThrowingJdbcSupplier; +import com.salesforce.datacloud.query.v3.QueryStatus; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.Ref; import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Statement; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.ZoneId; +import java.util.Calendar; +import java.util.Map; +import java.util.stream.Collectors; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import lombok.val; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.ipc.ArrowStreamReader; -public interface DataCloudResultSet extends ResultSet { - String getQueryId(); +@Slf4j +public class DataCloudResultSet implements ReadOnlyResultSet, ForwardOnlyResultSet, ResultSetWithPositionalGetters { + + @Getter + private final String queryId; + + private final ArrowStreamReaderCursor cursor; + private final QueryJDBCAccessor[] accessors; + private final DataCloudResultSetMetaData metadata; + private final ColumnNameResolver columnNameResolver; + ThrowingJdbcSupplier getQueryStatus; + private boolean wasNull; + private boolean closed; + + private DataCloudResultSet( + ArrowStreamReaderCursor cursor, 
+ String queryId, + DataCloudResultSetMetaData metadata, + QueryJDBCAccessor[] accessors, + ColumnNameResolver columnNameResolver) { + this.cursor = cursor; + this.queryId = queryId; + this.metadata = metadata; + this.accessors = accessors; + this.columnNameResolver = columnNameResolver; + this.closed = false; + } + + /** + * Creates a DataCloudResultSet from a {@link QueryResultArrowStream.Result} (reader paired + * with its backing allocator). + * + *
<p>
Ownership of both the reader and the allocator transfers to the returned result set — + * closing the result set closes the reader and then the allocator, in that order, so Arrow's + * buffer accounting clears before the allocator's budget check. If construction itself + * throws (for example a {@link SQLException} wrapping an unsupported Arrow type), this + * method closes both before re-throwing so the 100 MB {@link + * org.apache.arrow.memory.RootAllocator} does not leak. Callers must not close either + * separately on success. + * + * @param arrowStream The Arrow stream + allocator pair, both owned by the result set. + * @param queryId The query identifier (may be {@code null} for synthesized result sets). + * @param sessionZone The session timezone used for timestamp conversions. + */ + public static DataCloudResultSet of(QueryResultArrowStream.Result arrowStream, String queryId, ZoneId sessionZone) + throws SQLException { + try { + return create(arrowStream.getReader(), arrowStream.getAllocator(), queryId, sessionZone); + } catch (SQLException | RuntimeException ex) { + try { + arrowStream.getReader().close(); + } catch (Exception suppressed) { + ex.addSuppressed(suppressed); + } + try { + arrowStream.getAllocator().close(); + } catch (Exception suppressed) { + ex.addSuppressed(suppressed); + } + throw ex; + } + } + + private static DataCloudResultSet create( + ArrowStreamReader reader, BufferAllocator allocator, String queryId, ZoneId sessionZone) + throws SQLException { + try { + val schemaRoot = reader.getVectorSchemaRoot(); + val columns = schemaRoot.getSchema().getFields().stream() + .map(ArrowToHyperTypeMapper::toColumnMetadata) + .collect(Collectors.toList()); + val metadata = new DataCloudResultSetMetaData(columns); + val cursor = new ArrowStreamReaderCursor(reader, allocator, sessionZone); + val accessors = cursor.createAccessors().toArray(new QueryJDBCAccessor[0]); + val columnNameResolver = new ColumnNameResolver(columns); + return new 
DataCloudResultSet(cursor, queryId, metadata, accessors, columnNameResolver); + } catch (IOException ex) { + throw new SQLException("Unexpected error during ResultSet creation", "XX000", ex); + } catch (IllegalArgumentException ex) { + // Thrown by ArrowToHyperTypeMapper for Arrow types the driver does not model. + throw new SQLException("Unsupported column type in query result: " + ex.getMessage(), "0A000", ex); + } + } + + // --- Core ResultSet navigation --- + + @Override + public boolean next() throws SQLException { + checkClosed(); + return cursor.next(); + } + + @Override + public void close() throws SQLException { + if (!closed) { + cursor.close(); + closed = true; + } + } + + @Override + public boolean isClosed() throws SQLException { + return closed; + } + + @Override + public int getRow() throws SQLException { + checkClosed(); + return cursor.getRowsSeen(); + } + + @Override + public ResultSetMetaData getMetaData() throws SQLException { + checkClosed(); + return metadata; + } + + @Override + public int findColumn(String columnLabel) throws SQLException { + checkClosed(); + return columnNameResolver.findColumn(columnLabel); + } + + @Override + public Statement getStatement() throws SQLException { + checkClosed(); + return null; + } + + @Override + public boolean wasNull() throws SQLException { + checkClosed(); + return wasNull; + } + + // --- Accessor dispatch: delegate to QueryJDBCAccessor --- + + private QueryJDBCAccessor getAccessor(int columnIndex) throws SQLException { + checkClosed(); + if (columnIndex <= 0 || columnIndex > accessors.length) { + throw new SQLException( + "Column index " + columnIndex + " out of bounds (" + accessors.length + " columns available)"); + } + return accessors[columnIndex - 1]; + } + + private void updateWasNull(QueryJDBCAccessor accessor) throws SQLException { + wasNull = accessor.wasNull(); + } + + @Override + public String getString(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val 
result = accessor.getString(); + updateWasNull(accessor); + return result; + } + + @Override + public boolean getBoolean(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getBoolean(); + updateWasNull(accessor); + return result; + } + + @Override + public byte getByte(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getByte(); + updateWasNull(accessor); + return result; + } + + @Override + public short getShort(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getShort(); + updateWasNull(accessor); + return result; + } + + @Override + public int getInt(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getInt(); + updateWasNull(accessor); + return result; + } + + @Override + public long getLong(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getLong(); + updateWasNull(accessor); + return result; + } + + @Override + public float getFloat(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getFloat(); + updateWasNull(accessor); + return result; + } + + @Override + public double getDouble(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getDouble(); + updateWasNull(accessor); + return result; + } + + @SuppressWarnings("deprecation") + @Override + public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getBigDecimal(scale); + updateWasNull(accessor); + return result; + } + + @Override + public BigDecimal getBigDecimal(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getBigDecimal(); + updateWasNull(accessor); + return result; + } + + 
@Override + public byte[] getBytes(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getBytes(); + updateWasNull(accessor); + return result; + } + + @Override + public Date getDate(int columnIndex) throws SQLException { + return getDate(columnIndex, null); + } + + @Override + public Date getDate(int columnIndex, Calendar cal) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getDate(cal); + updateWasNull(accessor); + return result; + } + + @Override + public Time getTime(int columnIndex) throws SQLException { + return getTime(columnIndex, null); + } + + @Override + public Time getTime(int columnIndex, Calendar cal) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getTime(cal); + updateWasNull(accessor); + return result; + } + + @Override + public Timestamp getTimestamp(int columnIndex) throws SQLException { + return getTimestamp(columnIndex, null); + } + + @Override + public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getTimestamp(cal); + updateWasNull(accessor); + return result; + } + + @Override + public Object getObject(int columnIndex) throws SQLException { + // For ARRAY columns, dispatch to getArray() to return DataCloudArray + // (matching the behavior of the old AvaticaResultSet type dispatch) + if (metadata.getColumnType(columnIndex) == Types.ARRAY) { + return getArray(columnIndex); + } + val accessor = getAccessor(columnIndex); + val result = accessor.getObject(); + updateWasNull(accessor); + return result; + } + + @Override + public Object getObject(int columnIndex, Map> map) throws SQLException { + if (map == null || map.isEmpty()) { + // JDBC allows a null/empty type map to behave like plain getObject(int). 
+ return getObject(columnIndex); + } + val accessor = getAccessor(columnIndex); + val result = accessor.getObject(map); + updateWasNull(accessor); + return result; + } + + @Override + public T getObject(int columnIndex, Class type) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getObject(type); + updateWasNull(accessor); + return result; + } + + @Override + public Array getArray(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getArray(); + updateWasNull(accessor); + return result; + } + + @Override + public InputStream getAsciiStream(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getAsciiStream(); + updateWasNull(accessor); + return result; + } + + @SuppressWarnings("deprecation") + @Override + public InputStream getUnicodeStream(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getUnicodeStream(); + updateWasNull(accessor); + return result; + } + + @Override + public InputStream getBinaryStream(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getBinaryStream(); + updateWasNull(accessor); + return result; + } + + @Override + public Reader getCharacterStream(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getCharacterStream(); + updateWasNull(accessor); + return result; + } + + @Override + public Ref getRef(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getRef(); + updateWasNull(accessor); + return result; + } + + @Override + public Blob getBlob(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getBlob(); + updateWasNull(accessor); + return result; + } + + @Override + public Clob getClob(int columnIndex) throws SQLException { + 
val accessor = getAccessor(columnIndex); + val result = accessor.getClob(); + updateWasNull(accessor); + return result; + } + + public Struct getStruct(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException("getStruct is not supported"); + } + + @Override + public URL getURL(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getURL(); + updateWasNull(accessor); + return result; + } + + @Override + public RowId getRowId(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException("getRowId is not supported"); + } + + @Override + public SQLXML getSQLXML(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getSQLXML(); + updateWasNull(accessor); + return result; + } + + @Override + public String getNString(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getNString(); + updateWasNull(accessor); + return result; + } + + @Override + public Reader getNCharacterStream(int columnIndex) throws SQLException { + val accessor = getAccessor(columnIndex); + val result = accessor.getNCharacterStream(); + updateWasNull(accessor); + return result; + } + + // --- Miscellaneous ResultSet methods --- + + @Override + public int getHoldability() throws SQLException { + checkClosed(); + return ResultSet.HOLD_CURSORS_OVER_COMMIT; + } + + @Override + public void setFetchSize(int rows) throws SQLException { + // no-op: streaming result set controls its own fetch size + } + + @Override + public int getFetchSize() throws SQLException { + checkClosed(); + return 0; + } + + @Override + public SQLWarning getWarnings() throws SQLException { + checkClosed(); + return null; + } + + @Override + public void clearWarnings() throws SQLException { + // no-op + } + + @Override + public String getCursorName() throws SQLException { + throw new SQLFeatureNotSupportedException("getCursorName is 
not supported"); + } + + @Override + public T unwrap(Class iface) throws SQLException { + if (iface.isInstance(this)) { + return iface.cast(this); + } + throw new SQLException("Cannot unwrap to " + iface.getName()); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + return iface.isInstance(this); + } + + private void checkClosed() throws SQLException { + if (closed) { + throw new SQLException("ResultSet is closed"); + } + } } diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudStatement.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudStatement.java index 138d95d3..5b29cdfa 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudStatement.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/DataCloudStatement.java @@ -194,10 +194,12 @@ public ResultSet executeQuery(String sql) throws SQLException { try { val sessionZone = resolveSessionTimeZone(); val iterator = executeAdaptiveQuery(sql); + // Resolve queryId once before allocator construction so a throw between arrowStream + // creation and DataCloudResultSet.of can't strand the allocator outside its try/catch. + val queryId = iterator.getQueryStatus().getQueryId(); val arrowStream = SQLExceptionQueryResultIterator.createSqlExceptionArrowStreamReader( - iterator, includeCustomerDetail, iterator.getQueryStatus().getQueryId(), sql); - resultSet = - StreamingResultSet.of(arrowStream, iterator.getQueryStatus().getQueryId(), sessionZone); + iterator, includeCustomerDetail, queryId, sql); + resultSet = DataCloudResultSet.of(arrowStream, queryId, sessionZone); log.info( "executeAdaptiveQuery completed. 
queryId={}, sessionZone={}", queryHandle.getQueryStatus().getQueryId(), @@ -431,15 +433,12 @@ public ResultSet getResultSet() throws SQLException { return logTimedValue( () -> { if (resultSet == null && adaptiveIterator != null) { + // Resolve queryId once before allocator construction; see executeQuery + // above for the same hoist. + val queryId = adaptiveIterator.getQueryStatus().getQueryId(); val arrowStream = SQLExceptionQueryResultIterator.createSqlExceptionArrowStreamReader( - adaptiveIterator, - includeCustomerDetail, - adaptiveIterator.getQueryStatus().getQueryId(), - null); - resultSet = StreamingResultSet.of( - arrowStream, - adaptiveIterator.getQueryStatus().getQueryId(), - sessionZone); + adaptiveIterator, includeCustomerDetail, queryId, null); + resultSet = DataCloudResultSet.of(arrowStream, queryId, sessionZone); } else if (resultSet == null) { log.warn( "Prefer acquiring async result sets from helper methods DataCloudConnection::getChunkBasedResultSet and DataCloudConnection::getRowBasedResultSet. 
We will wait for the query's results to be produced in their entirety before returning a result set."); diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/MetadataSchemas.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/MetadataSchemas.java index 10f6f1db..f140ddfc 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/MetadataSchemas.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/MetadataSchemas.java @@ -4,6 +4,7 @@ */ package com.salesforce.datacloud.jdbc.core; +import static com.salesforce.datacloud.jdbc.util.Constants.BOOL; import static com.salesforce.datacloud.jdbc.util.Constants.INTEGER; import static com.salesforce.datacloud.jdbc.util.Constants.SHORT; import static com.salesforce.datacloud.jdbc.util.Constants.TEXT; @@ -46,11 +47,11 @@ public final class MetadataSchemas { text("LITERAL_SUFFIX"), text("CREATE_PARAMS"), shortColumn("NULLABLE"), - text("CASE_SENSITIVE"), + bool("CASE_SENSITIVE"), shortColumn("SEARCHABLE"), - text("UNSIGNED_ATTRIBUTE"), - text("FIXED_PREC_SCALE"), - text("AUTO_INCREMENT"), + bool("UNSIGNED_ATTRIBUTE"), + bool("FIXED_PREC_SCALE"), + bool("AUTO_INCREMENT"), text("LOCAL_TYPE_NAME"), shortColumn("MINIMUM_SCALE"), shortColumn("MAXIMUM_SCALE"), @@ -96,6 +97,10 @@ private static ColumnMetadata shortColumn(String name) { return new ColumnMetadata(name, HyperType.int16(true), SHORT); } + private static ColumnMetadata bool(String name) { + return new ColumnMetadata(name, HyperType.bool(true), BOOL); + } + private MetadataSchemas() { throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); } diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/QueryMetadataUtil.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/QueryMetadataUtil.java index fa3e9b8d..53de6228 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/QueryMetadataUtil.java +++ 
b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/QueryMetadataUtil.java @@ -12,7 +12,7 @@ import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.salesforce.datacloud.jdbc.core.metadata.DataCloudResultSetMetaData; +import com.salesforce.datacloud.jdbc.core.metadata.MetadataResultSets; import com.salesforce.datacloud.jdbc.core.types.HyperTypes; import com.salesforce.datacloud.jdbc.protocol.data.ColumnMetadata; import com.salesforce.datacloud.jdbc.protocol.data.HyperType; @@ -75,7 +75,7 @@ public static ResultSet createTableResultSet( } static ResultSet getMetadataResultSet(List columns, List data) throws SQLException { - return DataCloudMetadataResultSet.of(new DataCloudResultSetMetaData(columns), data); + return MetadataResultSets.ofRawRows(columns, data); } private static List constructTableData(ResultSet resultSet) throws SQLException { diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/SQLExceptionQueryResultIterator.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/SQLExceptionQueryResultIterator.java index 95b1cd8b..1891d730 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/SQLExceptionQueryResultIterator.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/SQLExceptionQueryResultIterator.java @@ -12,7 +12,6 @@ import lombok.AllArgsConstructor; import lombok.SneakyThrows; import lombok.val; -import org.apache.arrow.vector.ipc.ArrowStreamReader; import salesforce.cdp.hyperdb.v1.QueryResult; /** @@ -38,20 +37,22 @@ public void close() throws Exception { } /** - * Creates an {@link ArrowStreamReader} that wraps the given iterator with SQL exception handling. + * Creates an Arrow stream result that wraps the given iterator with SQL exception handling. *

* This factory method creates a {@link SQLExceptionQueryResultIterator} that wraps the provided - * iterator and converts it to an {@link ArrowStreamReader}. Any gRPC exceptions thrown during - * iteration will be converted to SQL exceptions with appropriate context information. + * iterator and converts it to a {@link QueryResultArrowStream.Result}. Any gRPC exceptions + * thrown during iteration will be converted to SQL exceptions with appropriate context + * information. The returned {@link QueryResultArrowStream.Result} owns the reader and its + * backing allocator — the caller must close it to release both. *

* * @param resultIterator the source iterator of {@link QueryResult} objects * @param includeCustomerDetail whether to include customer-specific details in exceptions * @param queryId the unique identifier of the query being executed * @param sql the SQL statement being executed - * @return an {@link ArrowStreamReader} that converts gRPC exceptions to SQL exceptions + * @return a {@link QueryResultArrowStream.Result} that converts gRPC exceptions to SQL exceptions */ - public static ArrowStreamReader createSqlExceptionArrowStreamReader( + public static QueryResultArrowStream.Result createSqlExceptionArrowStreamReader( CloseableIterator resultIterator, boolean includeCustomerDetail, String queryId, String sql) { val throwingSqlExceptionIterator = new SQLExceptionQueryResultIterator(resultIterator, includeCustomerDetail, queryId, sql); diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/StreamingResultSet.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/StreamingResultSet.java deleted file mode 100644 index 0a924747..00000000 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/StreamingResultSet.java +++ /dev/null @@ -1,497 +0,0 @@ -/** - * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the - * Apache 2.0 license. 
See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt - */ -package com.salesforce.datacloud.jdbc.core; - -import com.salesforce.datacloud.jdbc.core.accessor.QueryJDBCAccessor; -import com.salesforce.datacloud.jdbc.core.metadata.DataCloudResultSetMetaData; -import com.salesforce.datacloud.jdbc.core.resultset.ForwardOnlyResultSet; -import com.salesforce.datacloud.jdbc.core.resultset.ReadOnlyResultSet; -import com.salesforce.datacloud.jdbc.core.resultset.ResultSetWithPositionalGetters; -import com.salesforce.datacloud.jdbc.protocol.data.ArrowToHyperTypeMapper; -import com.salesforce.datacloud.jdbc.protocol.data.ColumnMetadata; -import com.salesforce.datacloud.jdbc.util.ThrowingJdbcSupplier; -import com.salesforce.datacloud.query.v3.QueryStatus; -import java.io.IOException; -import java.io.InputStream; -import java.io.Reader; -import java.math.BigDecimal; -import java.net.URL; -import java.sql.Array; -import java.sql.Blob; -import java.sql.Clob; -import java.sql.Date; -import java.sql.Ref; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; -import java.sql.SQLWarning; -import java.sql.SQLXML; -import java.sql.Statement; -import java.sql.Struct; -import java.sql.Time; -import java.sql.Timestamp; -import java.sql.Types; -import java.time.ZoneId; -import java.util.Calendar; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import lombok.val; -import org.apache.arrow.vector.ipc.ArrowStreamReader; - -@Slf4j -public class StreamingResultSet - implements DataCloudResultSet, ReadOnlyResultSet, ForwardOnlyResultSet, ResultSetWithPositionalGetters { - - @Getter - private final String queryId; - - private final ArrowStreamReaderCursor cursor; - private final QueryJDBCAccessor[] accessors; - private final DataCloudResultSetMetaData metadata; - private final ColumnNameResolver 
columnNameResolver; - ThrowingJdbcSupplier getQueryStatus; - private boolean wasNull; - private boolean closed; - - private StreamingResultSet( - ArrowStreamReaderCursor cursor, - String queryId, - DataCloudResultSetMetaData metadata, - QueryJDBCAccessor[] accessors, - ColumnNameResolver columnNameResolver) { - this.cursor = cursor; - this.queryId = queryId; - this.metadata = metadata; - this.accessors = accessors; - this.columnNameResolver = columnNameResolver; - this.closed = false; - } - - public static StreamingResultSet of(ArrowStreamReader resultStream, String queryId) throws SQLException { - return of(resultStream, queryId, ZoneId.systemDefault()); - } - - /** - * Creates a StreamingResultSet with a specified session timezone. - * - * @param resultStream The Arrow stream containing query results - * @param queryId The query identifier - * @param sessionZone The session timezone to use for timestamp conversions - * @return A new StreamingResultSet - * @throws SQLException If an error occurs during ResultSet creation - */ - public static StreamingResultSet of(ArrowStreamReader resultStream, String queryId, ZoneId sessionZone) - throws SQLException { - try { - val schemaRoot = resultStream.getVectorSchemaRoot(); - val fields = schemaRoot.getSchema().getFields(); - - val columns = fields.stream() - .map(field -> new ColumnMetadata(field.getName(), ArrowToHyperTypeMapper.toHyperType(field))) - .collect(Collectors.toList()); - val metadata = new DataCloudResultSetMetaData(columns); - - val cursor = new ArrowStreamReaderCursor(resultStream, sessionZone); - val accessorList = cursor.createAccessors(); - val accessors = accessorList.toArray(new QueryJDBCAccessor[0]); - - val columnNameResolver = new ColumnNameResolver(columns); - - return new StreamingResultSet(cursor, queryId, metadata, accessors, columnNameResolver); - } catch (IOException ex) { - throw new SQLException("Unexpected error during ResultSet creation", "XX000", ex); - } catch (IllegalArgumentException 
ex) { - // Thrown by ArrowToHyperTypeMapper for Arrow types the driver does not model. - throw new SQLException("Unsupported column type in query result: " + ex.getMessage(), "0A000", ex); - } - } - - // --- Core ResultSet navigation --- - - @Override - public boolean next() throws SQLException { - checkClosed(); - return cursor.next(); - } - - @Override - public void close() throws SQLException { - if (!closed) { - cursor.close(); - closed = true; - } - } - - @Override - public boolean isClosed() throws SQLException { - return closed; - } - - @Override - public int getRow() throws SQLException { - checkClosed(); - return cursor.getRowsSeen(); - } - - @Override - public ResultSetMetaData getMetaData() throws SQLException { - checkClosed(); - return metadata; - } - - @Override - public int findColumn(String columnLabel) throws SQLException { - checkClosed(); - return columnNameResolver.findColumn(columnLabel); - } - - @Override - public Statement getStatement() throws SQLException { - checkClosed(); - return null; - } - - @Override - public boolean wasNull() throws SQLException { - checkClosed(); - return wasNull; - } - - // --- Accessor dispatch: delegate to QueryJDBCAccessor --- - - private QueryJDBCAccessor getAccessor(int columnIndex) throws SQLException { - checkClosed(); - if (columnIndex <= 0 || columnIndex > accessors.length) { - throw new SQLException( - "Column index " + columnIndex + " out of bounds (" + accessors.length + " columns available)"); - } - return accessors[columnIndex - 1]; - } - - private void updateWasNull(QueryJDBCAccessor accessor) throws SQLException { - wasNull = accessor.wasNull(); - } - - @Override - public String getString(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getString(); - updateWasNull(accessor); - return result; - } - - @Override - public boolean getBoolean(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = 
accessor.getBoolean(); - updateWasNull(accessor); - return result; - } - - @Override - public byte getByte(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getByte(); - updateWasNull(accessor); - return result; - } - - @Override - public short getShort(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getShort(); - updateWasNull(accessor); - return result; - } - - @Override - public int getInt(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getInt(); - updateWasNull(accessor); - return result; - } - - @Override - public long getLong(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getLong(); - updateWasNull(accessor); - return result; - } - - @Override - public float getFloat(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getFloat(); - updateWasNull(accessor); - return result; - } - - @Override - public double getDouble(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getDouble(); - updateWasNull(accessor); - return result; - } - - @SuppressWarnings("deprecation") - @Override - public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getBigDecimal(scale); - updateWasNull(accessor); - return result; - } - - @Override - public BigDecimal getBigDecimal(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getBigDecimal(); - updateWasNull(accessor); - return result; - } - - @Override - public byte[] getBytes(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getBytes(); - updateWasNull(accessor); - return result; - } - - @Override - 
public Date getDate(int columnIndex) throws SQLException { - return getDate(columnIndex, null); - } - - @Override - public Date getDate(int columnIndex, Calendar cal) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getDate(cal); - updateWasNull(accessor); - return result; - } - - @Override - public Time getTime(int columnIndex) throws SQLException { - return getTime(columnIndex, null); - } - - @Override - public Time getTime(int columnIndex, Calendar cal) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getTime(cal); - updateWasNull(accessor); - return result; - } - - @Override - public Timestamp getTimestamp(int columnIndex) throws SQLException { - return getTimestamp(columnIndex, null); - } - - @Override - public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getTimestamp(cal); - updateWasNull(accessor); - return result; - } - - @Override - public Object getObject(int columnIndex) throws SQLException { - // For ARRAY columns, dispatch to getArray() to return DataCloudArray - // (matching the behavior of the old AvaticaResultSet type dispatch) - if (metadata.getColumnType(columnIndex) == Types.ARRAY) { - return getArray(columnIndex); - } - val accessor = getAccessor(columnIndex); - val result = accessor.getObject(); - updateWasNull(accessor); - return result; - } - - @Override - public Object getObject(int columnIndex, Map> map) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getObject(map); - updateWasNull(accessor); - return result; - } - - @Override - public T getObject(int columnIndex, Class type) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getObject(type); - updateWasNull(accessor); - return result; - } - - @Override - public Array getArray(int columnIndex) throws SQLException { - val accessor = 
getAccessor(columnIndex); - val result = accessor.getArray(); - updateWasNull(accessor); - return result; - } - - @Override - public InputStream getAsciiStream(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getAsciiStream(); - updateWasNull(accessor); - return result; - } - - @SuppressWarnings("deprecation") - @Override - public InputStream getUnicodeStream(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getUnicodeStream(); - updateWasNull(accessor); - return result; - } - - @Override - public InputStream getBinaryStream(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getBinaryStream(); - updateWasNull(accessor); - return result; - } - - @Override - public Reader getCharacterStream(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getCharacterStream(); - updateWasNull(accessor); - return result; - } - - @Override - public Ref getRef(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getRef(); - updateWasNull(accessor); - return result; - } - - @Override - public Blob getBlob(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getBlob(); - updateWasNull(accessor); - return result; - } - - @Override - public Clob getClob(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getClob(); - updateWasNull(accessor); - return result; - } - - public Struct getStruct(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("getStruct is not supported"); - } - - @Override - public URL getURL(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getURL(); - updateWasNull(accessor); - return result; - } - - @Override 
- public RowId getRowId(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("getRowId is not supported"); - } - - @Override - public SQLXML getSQLXML(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getSQLXML(); - updateWasNull(accessor); - return result; - } - - @Override - public String getNString(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getNString(); - updateWasNull(accessor); - return result; - } - - @Override - public Reader getNCharacterStream(int columnIndex) throws SQLException { - val accessor = getAccessor(columnIndex); - val result = accessor.getNCharacterStream(); - updateWasNull(accessor); - return result; - } - - // --- Miscellaneous ResultSet methods --- - - @Override - public int getHoldability() throws SQLException { - checkClosed(); - return ResultSet.HOLD_CURSORS_OVER_COMMIT; - } - - @Override - public void setFetchSize(int rows) throws SQLException { - // no-op: streaming result set controls its own fetch size - } - - @Override - public int getFetchSize() throws SQLException { - checkClosed(); - return 0; - } - - @Override - public SQLWarning getWarnings() throws SQLException { - checkClosed(); - return null; - } - - @Override - public void clearWarnings() throws SQLException { - // no-op - } - - @Override - public String getCursorName() throws SQLException { - throw new SQLFeatureNotSupportedException("getCursorName is not supported"); - } - - @Override - public T unwrap(Class iface) throws SQLException { - if (iface.isInstance(this)) { - return iface.cast(this); - } - throw new SQLException("Cannot unwrap to " + iface.getName()); - } - - @Override - public boolean isWrapperFor(Class iface) throws SQLException { - return iface.isInstance(this); - } - - private void checkClosed() throws SQLException { - if (closed) { - throw new SQLException("ResultSet is closed"); - } - } -} diff --git 
a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/metadata/MetadataResultSets.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/metadata/MetadataResultSets.java new file mode 100644 index 00000000..abe7b22b --- /dev/null +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/metadata/MetadataResultSets.java @@ -0,0 +1,162 @@ +/** + * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the + * Apache 2.0 license. See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt + */ +package com.salesforce.datacloud.jdbc.core.metadata; + +import com.salesforce.datacloud.jdbc.core.DataCloudResultSet; +import com.salesforce.datacloud.jdbc.protocol.QueryResultArrowStream; +import com.salesforce.datacloud.jdbc.protocol.data.ColumnMetadata; +import com.salesforce.datacloud.jdbc.protocol.data.HyperTypeToArrow; +import com.salesforce.datacloud.jdbc.protocol.data.VectorPopulator; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.sql.SQLException; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.ipc.ArrowStreamReader; +import org.apache.arrow.vector.ipc.ArrowStreamWriter; +import org.apache.arrow.vector.types.pojo.Schema; + +/** + * Factory for Arrow-backed metadata result sets. Materialises a row-oriented list of metadata + * values into the Arrow IPC format used by every other driver result set, so streaming query + * results and materialised metadata results both flow through {@link DataCloudResultSet}. + * + *

Each call builds a fresh single-batch Arrow stream: a writer-side {@link VectorSchemaRoot} + * is populated via {@link VectorPopulator} (the same code path the JDBC parameter encoder uses), + * serialised to bytes, and wrapped in an {@link ArrowStreamReader} that the result set owns. + */ +public final class MetadataResultSets { + + private MetadataResultSets() { + throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); + } + + /** Empty result set with the given column schema. */ + public static DataCloudResultSet empty(List columns) throws SQLException { + return of(columns, Collections.emptyList()); + } + + /** Empty result set with no columns — used as a placeholder by unsupported metadata methods. */ + public static DataCloudResultSet emptyNoColumns() throws SQLException { + return of(Collections.emptyList(), Collections.emptyList()); + } + + /** + * Build a result set whose schema is {@code columns} and whose rows are {@code rows}. Each + * inner list in {@code rows} supplies values in column order, and must have exactly + * {@code columns.size()} elements — a short row would silently leave the trailing columns + * unset (interpreted as Arrow null), which is almost always a caller bug. Today every caller + * goes through {@link MetadataSchemas} so the sizes match by construction; the precondition + * here makes a future caller bug surface at the boundary instead of in vector population. + */ + public static DataCloudResultSet of(List columns, List> rows) throws SQLException { + validateRowArity(columns, rows); + byte[] ipcBytes = writeArrowStream(columns, rows); + // Reuse the query-path allocator budget so a future caller materialising a multi-MB + // metadata response trips the cap cleanly instead of letting the JVM OOM. 
+ RootAllocator allocator = new RootAllocator(QueryResultArrowStream.ROOT_ALLOCATOR_BUDGET_BYTES); + ArrowStreamReader reader; + try { + reader = new ArrowStreamReader(new ByteArrayInputStream(ipcBytes), allocator); + } catch (Throwable t) { + // Constructor-time leak guard: if ArrowStreamReader fails before DataCloudResultSet.of + // can take ownership, close the allocator on the way out. + try { + allocator.close(); + } catch (Throwable s) { + t.addSuppressed(s); + } + throw t; + } + // Allocator and reader are now handed to DataCloudResultSet, which owns their lifecycle + // and closes both on close() — including the construction-failure path inside of(...). + return DataCloudResultSet.of( + new QueryResultArrowStream.Result(reader, allocator), /*queryId=*/ null, ZoneId.systemDefault()); + } + + /** + * Convenience overload for callers that still speak in terms of {@code List} where + * each element is itself a {@code List} row. Mirrors the old + * {@code DataCloudMetadataResultSet.of(..., List data)} signature. + */ + public static DataCloudResultSet ofRawRows(List columns, List rawRows) throws SQLException { + return of(columns, coerceRows(rawRows)); + } + + /** + * Build the Arrow schema, populate a VSR via the shared {@link VectorPopulator}, and write it + * out as a single-batch Arrow IPC stream. 
+ */ + private static byte[] writeArrowStream(List columns, List> rows) throws SQLException { + Schema schema = new Schema(columns.stream() + .map(c -> HyperTypeToArrow.toField(c.getName(), c.getType(), c.getTypeName())) + .collect(Collectors.toList())); + try (RootAllocator writeAllocator = new RootAllocator(Long.MAX_VALUE); + VectorSchemaRoot root = VectorSchemaRoot.create(schema, writeAllocator)) { + root.allocateNew(); + VectorPopulator.populateVectors(root, columns, rows, /*calendar=*/ null); + + ByteArrayOutputStream out = new ByteArrayOutputStream(); + try (ArrowStreamWriter writer = new ArrowStreamWriter(root, null, out)) { + writer.start(); + writer.writeBatch(); + writer.end(); + } + return out.toByteArray(); + } catch (IOException ex) { + throw new SQLException("Failed to build metadata result set", "XX000", ex); + } + } + + /** + * Verify that every supplied row has exactly {@code columns.size()} elements. A {@code null} + * row is allowed and is interpreted as a row of all-nulls (matching the old + * {@code coerceRows} convention of converting null rows to empty lists, which is the only + * shape with no positional values to populate). Anything else is a caller bug. + */ + private static void validateRowArity(List columns, List> rows) throws SQLException { + int expected = columns.size(); + for (int i = 0; i < rows.size(); i++) { + List row = rows.get(i); + if (row == null) { + continue; + } + // The legacy coerceRows path turns a null-row into Collections.emptyList(); accept + // empty as the "all nulls" shape here too. 
+ if (row.isEmpty() && expected > 0) { + continue; + } + if (row.size() != expected) { + throw new SQLException("Metadata row " + i + " has " + row.size() + " elements but schema has " + + expected + " columns"); + } + } + } + + @SuppressWarnings("unchecked") + private static List> coerceRows(List rawRows) throws SQLException { + if (rawRows == null || rawRows.isEmpty()) { + return Collections.emptyList(); + } + List> result = new ArrayList<>(rawRows.size()); + for (Object row : rawRows) { + if (row == null) { + result.add(Collections.emptyList()); + } else if (row instanceof List) { + result.add((List) row); + } else { + throw new SQLException( + "Metadata row is not a List: " + row.getClass().getName()); + } + } + return result; + } +} diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/resultset/ColumnAccessor.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/resultset/ColumnAccessor.java deleted file mode 100644 index 42f24a9d..00000000 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/resultset/ColumnAccessor.java +++ /dev/null @@ -1,61 +0,0 @@ -/** - * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the - * Apache 2.0 license. See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt - */ -package com.salesforce.datacloud.jdbc.core.resultset; - -import java.math.BigDecimal; -import java.sql.Array; -import java.sql.Date; -import java.sql.SQLException; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.OptionalDouble; -import java.util.OptionalLong; - -/** - * Accessor functions used to read column values from a {@link SimpleResultSet}. - * - * This interface is optimized for performance, and hence avoids the use of boxed types as much as possible. 
- */ -public interface ColumnAccessor { - public default Boolean getBoolean(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } - /// Get the value of the column as an integer. Used for `getShort`, `getInt`, and `getLong`. - public default OptionalLong getAnyInteger(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } - - public default BigDecimal getBigDecimal(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } - - public default OptionalDouble getAnyFloatingPoint(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } - - public default String getString(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } - - public default byte[] getBytes(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } - - public default Date getDate(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } - - public default Time getTime(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } - - public default Timestamp getTimestamp(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } - - public default Array getArray(ConcreteResultSet resultSet) throws SQLException { - throw new UnsupportedOperationException(); - } -} diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/resultset/SimpleResultSet.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/resultset/SimpleResultSet.java deleted file mode 100644 index 6c84555e..00000000 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/core/resultset/SimpleResultSet.java +++ /dev/null @@ -1,379 +0,0 @@ -/** - * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under 
the - * Apache 2.0 license. See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt - */ -package com.salesforce.datacloud.jdbc.core.resultset; - -import com.salesforce.datacloud.jdbc.core.metadata.DataCloudResultSetMetaData; -import com.salesforce.datacloud.jdbc.core.types.HyperTypes; -import com.salesforce.datacloud.jdbc.protocol.data.HyperTypeKind; -import java.io.InputStream; -import java.io.Reader; -import java.math.BigDecimal; -import java.net.URL; -import java.sql.Array; -import java.sql.Blob; -import java.sql.Clob; -import java.sql.Date; -import java.sql.Ref; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; -import java.sql.SQLWarning; -import java.sql.SQLXML; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.Calendar; -import java.util.Map; -import java.util.OptionalLong; -import lombok.AllArgsConstructor; -import lombok.val; - -/** - * A base class for simple result sets. - * - * This class provides a basic implementation of the {@link ResultSet} interface, - * with support for read-only access and forward-only cursors. - * - * Access to SQL values is provided via {@link ColumnAccessor} instances. This class - * already takes care of casting from SQL types to the compatible Java types. - * - *

Planned for removal. This implementation only covers the narrow slice of - * {@link HyperTypeKind} used by JDBC metadata result sets (INT32 for columns like - * {@code DATA_TYPE}, CHAR/VARCHAR for text columns, and INT8-INT64/OID via the - * {@code isIntegerLike} helper) — not the full query-result type universe. Rather - * than expanding the {@link #getLong}, {@link #getDouble}, {@link #getBigDecimal}, - * {@link #getObject} switches to cover every kind, we intend to migrate - * {@code DataCloudMetadataResultSet} to build on {@link StreamingResultSet} so there - * is only one result-set implementation in the driver. Treat this class as - * maintenance-only until that refactor lands. - */ -@AllArgsConstructor -public abstract class SimpleResultSet - implements ReadOnlyResultSet, ForwardOnlyResultSet, ResultSetWithPositionalGetters { - /// The metadata for the result set - protected final DataCloudResultSetMetaData metadata; - /// The accessor functions for the columns - protected final ColumnAccessor[] accessors; - /// Was the previously read value null? - private boolean wasNull = false; - - @Override - public ResultSetMetaData getMetaData() throws SQLException { - return metadata; - } - - @Override - public int findColumn(String columnLabel) throws SQLException { - return metadata.findColumn(columnLabel); - } - - @Override - public int getHoldability() throws SQLException { - // Our result sets are independent of transaction state - return ResultSet.HOLD_CURSORS_OVER_COMMIT; - } - - @Override - public void setFetchSize(int rows) throws SQLException { - throw new SQLFeatureNotSupportedException("setFetchSize is not supported"); - } - - @Override - public int getFetchSize() throws SQLException { - return 0; - } - - @Override - public SQLWarning getWarnings() throws SQLException { - // We don't support per-row warnings. - // Note that `ResultSet.getWarnings()` retrieves per-row warnings. 
- // Warnings for the overall query would be attached to the Statement, not the ResultSet. - return null; - } - - @Override - public void clearWarnings() throws SQLException { - throw new SQLFeatureNotSupportedException("clearWarnings is not supported"); - } - - @Override - public String getCursorName() throws SQLException { - throw new SQLFeatureNotSupportedException("getCursorName is not supported"); - } - - //////////////////////////////// - /// Accessors for SQL values - - /// Get the accessor for a column - private ColumnAccessor getAccessor(int columnIndex) throws SQLException { - if (columnIndex <= 0 || columnIndex > accessors.length) { - throw new SQLException( - "Column index " + columnIndex + " out of bounds (" + accessors.length + " columns available)"); - } - return accessors[columnIndex - 1]; - } - - @SuppressWarnings("unchecked") - private SELF getSubclass() { - return (SELF) this; - } - - @Override - public boolean wasNull() throws SQLException { - return wasNull; - } - - @Override - public String getString(int columnIndex) throws SQLException { - String value = getAccessor(columnIndex).getString(getSubclass()); - wasNull = value == null; - return value; - } - - @Override - public boolean getBoolean(int columnIndex) throws SQLException { - Boolean value = getAccessor(columnIndex).getBoolean(getSubclass()); - wasNull = value == null; - return value == null ? 
false : value; - } - - public byte getByte(int columnIndex) throws SQLException { - long v = getLong(columnIndex); - if (wasNull) { - return 0; - } - if (v < Byte.MIN_VALUE || v > Byte.MAX_VALUE) { - throw new SQLException( - "Column " + getMetaData().getColumnName(columnIndex) + " is out of range for a byte"); - } - return (byte) v; - } - - @Override - public short getShort(int columnIndex) throws SQLException { - long v = getLong(columnIndex); - if (wasNull) { - return 0; - } - if (v < Short.MIN_VALUE || v > Short.MAX_VALUE) { - throw new SQLException( - "Column " + getMetaData().getColumnName(columnIndex) + " is out of range for a short"); - } - return (short) v; - } - - @Override - public int getInt(int columnIndex) throws SQLException { - long v = getLong(columnIndex); - if (wasNull) { - return 0; - } - if (v < Integer.MIN_VALUE || v > Integer.MAX_VALUE) { - throw new SQLException( - "Column " + getMetaData().getColumnName(columnIndex) + " is out of range for an int"); - } - return (int) v; - } - - @Override - public long getLong(int columnIndex) throws SQLException { - val type = metadata.getColumn(columnIndex).getType(); - if (!HyperTypes.isIntegerLike(type)) { - throw new SQLException("Unsupported column type for integer-like types: " + type); - } - OptionalLong v = getAccessor(columnIndex).getAnyInteger(getSubclass()); - wasNull = !v.isPresent(); - return v.orElse(0L); - } - - private static final double LONG_MAX_DOUBLE = StrictMath.nextDown((double) Long.MAX_VALUE); - private static final double LONG_MIN_DOUBLE = StrictMath.nextUp((double) Long.MIN_VALUE); - - @Override - public float getFloat(int columnIndex) throws SQLException { - double v = getDouble(columnIndex); - if (wasNull) { - return 0; - } - if (v < -Float.MAX_VALUE || v > Float.MAX_VALUE) { - throw new SQLException( - "Column " + getMetaData().getColumnName(columnIndex) + " is out of range for a float"); - } - return (float) v; - } - - @Override - public double getDouble(int columnIndex) 
throws SQLException { - val type = metadata.getColumn(columnIndex).getType(); - if (!HyperTypes.isIntegerLike(type)) { - throw new SQLException("Unsupported column type for floating-point types: " + type); - } - OptionalLong v = getAccessor(columnIndex).getAnyInteger(getSubclass()); - wasNull = !v.isPresent(); - return v.orElse(0L); - } - - @Override - public BigDecimal getBigDecimal(int columnIndex) throws SQLException { - val type = metadata.getColumn(columnIndex).getType(); - if (!HyperTypes.isIntegerLike(type)) { - // TODO: apparently, PostgreSQL does not support float/decimal conversion. Double-check this with test - // cases. - throw new SQLException("Unsupported column type for numeric types: " + type); - } - OptionalLong v = getAccessor(columnIndex).getAnyInteger(getSubclass()); - wasNull = !v.isPresent(); - return v.isPresent() ? new BigDecimal(v.getAsLong()) : null; - } - - @Override - public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { - // TODO implement this - throw new UnsupportedOperationException("Unimplemented method 'getBigDecimal'"); - } - - @Override - public byte[] getBytes(int columnIndex) throws SQLException { - // TODO implement this - throw new UnsupportedOperationException("Unimplemented method 'getBytes'"); - } - - @Override - public Date getDate(int columnIndex) throws SQLException { - return getDate(columnIndex, null); - } - - @Override - public Date getDate(int columnIndex, Calendar cal) throws SQLException { - // TODO implement this - throw new UnsupportedOperationException("Unimplemented method 'getDate'"); - } - - @Override - public Time getTime(int columnIndex) throws SQLException { - return getTime(columnIndex, null); - } - - @Override - public Time getTime(int columnIndex, Calendar cal) throws SQLException { - // TODO Auto-generated method stub - throw new UnsupportedOperationException("Unimplemented method 'getTime'"); - } - - @Override - public Timestamp getTimestamp(int columnIndex) throws 
SQLException { - return getTimestamp(columnIndex, null); - } - - @Override - public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { - // TODO Auto-generated method stub - throw new UnsupportedOperationException("Unimplemented method 'getTimestamp'"); - } - - @Override - public Array getArray(int columnIndex) throws SQLException { - // TODO implement this - throw new UnsupportedOperationException("Unimplemented method 'getArray'"); - } - - @Override - public Object getObject(int columnIndex) throws SQLException { - val type = metadata.getColumn(columnIndex).getType(); - if (type.getKind() == HyperTypeKind.INT32) { - val v = getInt(columnIndex); - return wasNull ? null : v; - } - if (HyperTypes.isStringLike(type)) { - return getString(columnIndex); - } - throw new SQLException("Unsupported column type in `getObject`: " + type); - } - - @Override - public T getObject(int columnIndex, Class type) throws SQLException { - val v = getObject(columnIndex); - if (v == null) { - return null; - } - if (type.isInstance(v)) { - return type.cast(v); - } - throw new SQLException("Unsupported column type in `getObject`: " - + metadata.getColumn(columnIndex).getType().toString()); - } - - @Override - public Object getObject(int columnIndex, Map> map) throws SQLException { - if (map == null || map.isEmpty()) { - return getObject(columnIndex); - } - throw new UnsupportedOperationException("Unimplemented method 'getObject'"); - } - - //////////////////////////////// - // Unsupported getters - - @Override - public Blob getBlob(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving Blobs is not supported"); - } - - @Override - public Clob getClob(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving Clobs is not supported"); - } - - @Override - public Ref getRef(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving Ref objects is not 
supported"); - } - - @Override - public URL getURL(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving URLs is not supported"); - } - - @Override - public RowId getRowId(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving row IDs is not supported"); - } - - @Override - public SQLXML getSQLXML(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving SQLXML is not supported"); - } - - @Override - public String getNString(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving NStrings is not supported"); - } - - @Override - public Reader getNCharacterStream(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving NCharacterStreams is not supported"); - } - - @Override - public InputStream getAsciiStream(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving AsciiStreams is not supported"); - } - - @Override - public InputStream getUnicodeStream(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving UnicodeStreams is not supported"); - } - - @Override - public InputStream getBinaryStream(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving BinaryStreams is not supported"); - } - - @Override - public Reader getCharacterStream(int columnIndex) throws SQLException { - throw new SQLFeatureNotSupportedException("Retrieving CharacterStreams is not supported"); - } -} diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/QueryResultArrowStream.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/QueryResultArrowStream.java index c2a1609a..bc578583 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/QueryResultArrowStream.java +++ 
b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/QueryResultArrowStream.java @@ -7,6 +7,7 @@ import com.google.common.base.Predicates; import com.google.common.collect.FluentIterable; import com.salesforce.datacloud.jdbc.core.ByteStringReadableByteChannel; +import lombok.Value; import lombok.val; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.ipc.ArrowStreamReader; @@ -19,9 +20,27 @@ public class QueryResultArrowStream { */ public static final OutputFormat OUTPUT_FORMAT = OutputFormat.ARROW_IPC; - private static final int ROOT_ALLOCATOR_MB_FROM_V2 = 100 * 1024 * 1024; + /** + * Per-result-set allocator budget. Hitting this threshold trips a clean + * {@link org.apache.arrow.memory.OutOfMemoryException} from the allocator instead of letting + * the JVM OOM. Reused for the metadata-side allocator in + * {@link com.salesforce.datacloud.jdbc.core.metadata.MetadataResultSets}. + */ + public static final int ROOT_ALLOCATOR_BUDGET_BYTES = 100 * 1024 * 1024; + + /** + * Pair of the {@link ArrowStreamReader} that decodes gRPC chunks and the {@link RootAllocator} + * that backs it. Callers hand ownership to {@link + * com.salesforce.datacloud.jdbc.core.DataCloudResultSet#of} which closes both; the pair is + * never closed directly. + */ + @Value + public static class Result { + ArrowStreamReader reader; + RootAllocator allocator; + } - public static ArrowStreamReader toArrowStreamReader(CloseableIterator iterator) { + public static Result toArrowStreamReader(CloseableIterator iterator) { val byteStringIterator = FluentIterable.from(() -> iterator) .transform( input -> input.hasBinaryPart() ? 
input.getBinaryPart().getData() : null) @@ -47,6 +66,19 @@ public void close() throws Exception { } }; val channel = new ByteStringReadableByteChannel(closeable); - return new ArrowStreamReader(channel, new RootAllocator(ROOT_ALLOCATOR_MB_FROM_V2)); + RootAllocator allocator = new RootAllocator(ROOT_ALLOCATOR_BUDGET_BYTES); + try { + return new Result(new ArrowStreamReader(channel, allocator), allocator); + } catch (Throwable t) { + // ArrowStreamReader's constructor is benign today, but a future Arrow upgrade could + // add constructor-side validation. Close the allocator on the way out so the budget + // is reclaimed. + try { + allocator.close(); + } catch (Throwable s) { + t.addSuppressed(s); + } + throw t; + } } } diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/ArrowToHyperTypeMapper.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/ArrowToHyperTypeMapper.java index 1c4e02ac..53eeafed 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/ArrowToHyperTypeMapper.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/ArrowToHyperTypeMapper.java @@ -30,6 +30,19 @@ public static HyperType toHyperType(Field field) { return field.getType().accept(new ArrowTypeVisitor(field)); } + /** + * Translate an Arrow {@link Field} to a full {@link ColumnMetadata}, picking up the optional + * JDBC type-name override stamped under + * {@link HyperTypeToArrow#JDBC_TYPE_NAME_METADATA_KEY} when present. + */ + public static ColumnMetadata toColumnMetadata(Field field) { + HyperType type = toHyperType(field); + String override = field.getMetadata() == null + ? null + : field.getMetadata().get(HyperTypeToArrow.JDBC_TYPE_NAME_METADATA_KEY); + return new ColumnMetadata(field.getName(), type, override); + } + /** Arrow visitor that produces a {@link HyperType} for each supported Arrow type. 
*/ private static class ArrowTypeVisitor implements ArrowType.ArrowTypeVisitor { private final Field field; diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/HyperTypeToArrow.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/HyperTypeToArrow.java index 55e5e15c..ee8d8f02 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/HyperTypeToArrow.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/HyperTypeToArrow.java @@ -30,13 +30,35 @@ private HyperTypeToArrow() { /** Build an Arrow {@link Field} with the given name and the mapped {@link FieldType}. */ public static Field toField(String name, HyperType type) { + return toField(name, type, null); + } + + /** + * Build an Arrow {@link Field} and stamp an optional JDBC type-name override into the field + * metadata under {@link #JDBC_TYPE_NAME_METADATA_KEY}. The override is how {@link + * ColumnMetadata#getTypeName()} round-trips through Arrow — it lets JDBC-spec labels (e.g. + * {@code "TEXT"} for metadata columns) survive serialisation, which is the only way to + * carry them through an {@link org.apache.arrow.vector.ipc.ArrowStreamReader}-backed code + * path. + */ + public static Field toField(String name, HyperType type, String jdbcTypeName) { + FieldType fieldType = toFieldType(type, jdbcTypeName); if (type.getKind() == HyperTypeKind.ARRAY) { Field childField = toField("$element", type.getElement()); - return new Field(name, toFieldType(type), Collections.singletonList(childField)); + return new Field(name, fieldType, Collections.singletonList(childField)); } - return new Field(name, toFieldType(type), null); + return new Field(name, fieldType, null); } + /** + * Arrow field-metadata key under which the JDBC-spec {@link ColumnMetadata#getTypeName() + * typeName} override is round-tripped. 
The {@code datacloud-jdbc:} prefix namespaces the + * key so it cannot collide with anything Hyper, query-federator, or another Arrow producer + * might stamp on its own field metadata; the unprefixed {@code jdbc:} namespace is not + * reserved by the Arrow spec. + */ + public static final String JDBC_TYPE_NAME_METADATA_KEY = "datacloud-jdbc:type_name"; + /** * Map a {@link HyperType} to an Arrow {@link FieldType}. * @@ -47,12 +69,26 @@ public static Field toField(String name, HyperType type) { * without loss. */ public static FieldType toFieldType(HyperType type) { + return toFieldType(type, null); + } + + /** + * Overload that stamps an optional JDBC type-name override into the field metadata under + * {@link #JDBC_TYPE_NAME_METADATA_KEY} so {@link ColumnMetadata#getTypeName()} round-trips + * through Arrow without needing a parallel metadata channel. + */ + public static FieldType toFieldType(HyperType type, String jdbcTypeName) { ArrowType arrowType = toArrowType(type); Map metadata = metadataFor(type); - if (type.isNullable()) { - return new FieldType(true, arrowType, null, metadata); + if (jdbcTypeName != null) { + if (metadata == null) { + metadata = new HashMap<>(); + } else { + metadata = new HashMap<>(metadata); + } + metadata.put(JDBC_TYPE_NAME_METADATA_KEY, jdbcTypeName); } - return new FieldType(false, arrowType, null, metadata); + return new FieldType(type.isNullable(), arrowType, null, metadata); } /** Hyper-compatible field metadata for types whose length is not carried in the ArrowType. 
*/ diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/VectorPopulator.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/VectorPopulator.java index a298e8b4..85d68535 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/VectorPopulator.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/protocol/data/VectorPopulator.java @@ -36,7 +36,15 @@ import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.VectorSchemaRoot; -/** Populates vectors in a VectorSchemaRoot with values from a list of parameters. */ +/** + * Populates vectors in a {@link VectorSchemaRoot} with Java values, dispatching per column by + * {@link HyperTypeKind}. + * + *

The primitive {@link #setCell(ValueVector, HyperTypeKind, int, Object, Calendar)} is the + * single place the driver converts a Java {@link Object} into the right Arrow setter call. Both + * the JDBC parameter-encoding path (single row) and the JDBC metadata path (many rows) go + * through it. + */ public final class VectorPopulator { private VectorPopulator() { @@ -44,14 +52,10 @@ private VectorPopulator() { } /** - * Populates the vectors in the given VectorSchemaRoot using the {@link HyperType} of each - * parameter to decide which typed setter to dispatch to. - * - * @param root the VectorSchemaRoot to populate + * Populate a single-row VSR from a list of {@link ParameterBinding}s. Used by the parameter + * encoding path; the VSR's schema is built from the bindings' {@link HyperType}s. */ public static void populateVectors(VectorSchemaRoot root, List parameters, Calendar calendar) { - VectorValueSetterFactory factory = new VectorValueSetterFactory(calendar); - for (int i = 0; i < parameters.size(); i++) { ParameterBinding binding = parameters.get(i); if (binding == null) { @@ -62,32 +66,59 @@ public static void populateVectors(VectorSchemaRoot root, List HyperTypeKind kind = binding.getType().getKind(); ValueVector vector = root.getVector(root.getSchema().getFields().get(i).getName()); - Object value = binding.getValue(); - - @SuppressWarnings(value = "unchecked") - VectorValueSetter setter = (VectorValueSetter) factory.getSetter(kind); + setCell(vector, kind, 0, binding.getValue(), calendar); + } + root.setRowCount(1); + } - if (setter != null) { - setter.setValue(vector, value); - } else { - throw new UnsupportedOperationException("Unsupported HyperTypeKind for parameter binding: " + kind); + /** + * Populate {@code root} from a row-major list of Java values. 
{@code columns} supplies the + * per-column {@link HyperType} used to dispatch the setter; row {@code r}, column {@code c} + * is taken from {@code rows.get(r).get(c)} (a missing/short row yields a null cell). + * + *

Row count is set to {@code rows.size()}. Used by the metadata path. + */ + public static void populateVectors( + VectorSchemaRoot root, List columns, List> rows, Calendar calendar) { + int rowCount = rows == null ? 0 : rows.size(); + for (int c = 0; c < columns.size(); c++) { + ValueVector vector = root.getVector(columns.get(c).getName()); + HyperTypeKind kind = columns.get(c).getType().getKind(); + for (int r = 0; r < rowCount; r++) { + List row = rows.get(r); + Object value = row == null || c >= row.size() ? null : row.get(c); + setCell(vector, kind, r, value, calendar); } + vector.setValueCount(rowCount); } - root.setRowCount(1); // Set row count to 1 since we have exactly one row + root.setRowCount(rowCount); + } + + /** Sets cell ({@code vector}, {@code index}) to {@code value}, or null if value is null. */ + static void setCell(ValueVector vector, HyperTypeKind kind, int index, Object value, Calendar calendar) { + @SuppressWarnings("unchecked") + VectorValueSetter setter = + (VectorValueSetter) VectorValueSetterFactory.getSetter(kind, calendar); + if (setter == null) { + throw new UnsupportedOperationException("Unsupported HyperTypeKind for vector population: " + kind); + } + setter.setValue(vector, index, value); } } @FunctionalInterface interface VectorValueSetter { - void setValue(T vector, Object value); + void setValue(T vector, int index, Object value); } -/** Factory for creating appropriate setter instances based on {@link HyperTypeKind}. */ -class VectorValueSetterFactory { - private final Map> setterMap; +/** Factory for indexed setters keyed by {@link HyperTypeKind}. 
*/ +final class VectorValueSetterFactory { + private VectorValueSetterFactory() {} + + private static final Map> SETTERS_NO_CAL = build(null); - VectorValueSetterFactory(Calendar calendar) { - setterMap = ImmutableMap.ofEntries( + private static Map> build(Calendar calendar) { + return ImmutableMap.ofEntries( Maps.immutableEntry(HyperTypeKind.VARCHAR, new VarCharVectorSetter()), Maps.immutableEntry(HyperTypeKind.CHAR, new VarCharVectorSetter()), Maps.immutableEntry(HyperTypeKind.FLOAT4, new Float4VectorSetter()), @@ -104,8 +135,13 @@ class VectorValueSetterFactory { Maps.immutableEntry(HyperTypeKind.INT8, new TinyIntVectorSetter())); } - VectorValueSetter getSetter(HyperTypeKind kind) { - return setterMap.get(kind); + static VectorValueSetter getSetter(HyperTypeKind kind, Calendar calendar) { + if (calendar == null) { + return SETTERS_NO_CAL.get(kind); + } + // Only TIME uses the calendar; build a per-call map rather than caching so tests that + // pass different Calendars cannot race. This path is cold (parameter-binding only). + return build(calendar).get(kind); } } @@ -118,20 +154,20 @@ abstract class BaseVectorSetter implements VectorValue } @Override - public void setValue(T vector, Object value) { + public void setValue(T vector, int index, Object value) { if (value == null) { - setNullValue(vector); + setNullValue(vector, index); } else if (valueType.isInstance(value)) { - setValueInternal(vector, valueType.cast(value)); + setValueInternal(vector, index, valueType.cast(value)); } else { throw new IllegalArgumentException( "Value for " + vector.getClass().getSimpleName() + " must be of type " + valueType.getSimpleName()); } } - protected abstract void setNullValue(T vector); + protected abstract void setNullValue(T vector, int index); - protected abstract void setValueInternal(T vector, V value); + protected abstract void setValueInternal(T vector, int index, V value); } /** Setter implementation for VarCharVector. 
*/ @@ -141,13 +177,13 @@ class VarCharVectorSetter extends BaseVectorSetter { } @Override - protected void setValueInternal(VarCharVector vector, String value) { - vector.setSafe(0, value.getBytes(StandardCharsets.UTF_8)); + protected void setValueInternal(VarCharVector vector, int index, String value) { + vector.setSafe(index, value.getBytes(StandardCharsets.UTF_8)); } @Override - protected void setNullValue(VarCharVector vector) { - vector.setNull(0); + protected void setNullValue(VarCharVector vector, int index) { + vector.setNull(index); } } @@ -158,13 +194,13 @@ class Float4VectorSetter extends BaseVectorSetter { } @Override - protected void setValueInternal(Float4Vector vector, Float value) { - vector.setSafe(0, value); + protected void setValueInternal(Float4Vector vector, int index, Float value) { + vector.setSafe(index, value); } @Override - protected void setNullValue(Float4Vector vector) { - vector.setNull(0); + protected void setNullValue(Float4Vector vector, int index) { + vector.setNull(index); } } @@ -175,64 +211,81 @@ class Float8VectorSetter extends BaseVectorSetter { } @Override - protected void setValueInternal(Float8Vector vector, Double value) { - vector.setSafe(0, value); + protected void setValueInternal(Float8Vector vector, int index, Double value) { + vector.setSafe(index, value); } @Override - protected void setNullValue(Float8Vector vector) { - vector.setNull(0); + protected void setNullValue(Float8Vector vector, int index) { + vector.setNull(index); } } -/** Setter implementation for IntVector. */ -class IntVectorSetter extends BaseVectorSetter { +/** + * Setter implementation for IntVector. Accepts any Number so metadata rows can pass long/short + * values, but range-checks before narrowing to int — silent truncation of an out-of-range Long + * (e.g. binding {@code Long.MAX_VALUE} to an INT32 parameter) is never the right answer. 
+ */ +class IntVectorSetter extends BaseVectorSetter { IntVectorSetter() { - super(Integer.class); + super(Number.class); } @Override - protected void setValueInternal(IntVector vector, Integer value) { - vector.setSafe(0, value); + protected void setValueInternal(IntVector vector, int index, Number value) { + long lv = value.longValue(); + if (lv < Integer.MIN_VALUE || lv > Integer.MAX_VALUE) { + throw new IllegalArgumentException("Value " + lv + " is out of range for INT32"); + } + vector.setSafe(index, value.intValue()); } @Override - protected void setNullValue(IntVector vector) { - vector.setNull(0); + protected void setNullValue(IntVector vector, int index) { + vector.setNull(index); } } -/** Setter implementation for SmallIntVector. */ -class SmallIntVectorSetter extends BaseVectorSetter { +/** Setter implementation for SmallIntVector. Range-checks before narrowing to short. */ +class SmallIntVectorSetter extends BaseVectorSetter { SmallIntVectorSetter() { - super(Short.class); + super(Number.class); } @Override - protected void setValueInternal(SmallIntVector vector, Short value) { - vector.setSafe(0, value); + protected void setValueInternal(SmallIntVector vector, int index, Number value) { + long lv = value.longValue(); + if (lv < Short.MIN_VALUE || lv > Short.MAX_VALUE) { + throw new IllegalArgumentException("Value " + lv + " is out of range for INT16"); + } + vector.setSafe(index, value.shortValue()); } @Override - protected void setNullValue(SmallIntVector vector) { - vector.setNull(0); + protected void setNullValue(SmallIntVector vector, int index) { + vector.setNull(index); } } -/** Setter implementation for BigIntVector. */ -class BigIntVectorSetter extends BaseVectorSetter { +/** + * Setter implementation for BigIntVector. Accepts any Number; the natural range of long is the + * widest integer type the vector encodes, so no range narrowing happens here. Non-integral + * Numbers (e.g. 
Double) are normalized via Number.longValue, mirroring the rest of the integer + * setters in this file. + */ +class BigIntVectorSetter extends BaseVectorSetter { BigIntVectorSetter() { - super(Long.class); + super(Number.class); } @Override - protected void setValueInternal(BigIntVector vector, Long value) { - vector.setSafe(0, value); + protected void setValueInternal(BigIntVector vector, int index, Number value) { + vector.setSafe(index, value.longValue()); } @Override - protected void setNullValue(BigIntVector vector) { - vector.setNull(0); + protected void setNullValue(BigIntVector vector, int index) { + vector.setNull(index); } } @@ -243,13 +296,13 @@ class BitVectorSetter extends BaseVectorSetter { } @Override - protected void setValueInternal(BitVector vector, Boolean value) { - vector.setSafe(0, Boolean.TRUE.equals(value) ? 1 : 0); + protected void setValueInternal(BitVector vector, int index, Boolean value) { + vector.setSafe(index, Boolean.TRUE.equals(value) ? 1 : 0); } @Override - protected void setNullValue(BitVector vector) { - vector.setNull(0); + protected void setNullValue(BitVector vector, int index) { + vector.setNull(index); } } @@ -260,13 +313,13 @@ class DecimalVectorSetter extends BaseVectorSetter { } @Override - protected void setValueInternal(DecimalVector vector, BigDecimal value) { - vector.setSafe(0, value.unscaledValue().longValue()); + protected void setValueInternal(DecimalVector vector, int index, BigDecimal value) { + vector.setSafe(index, value.unscaledValue().longValue()); } @Override - protected void setNullValue(DecimalVector vector) { - vector.setNull(0); + protected void setNullValue(DecimalVector vector, int index) { + vector.setNull(index); } } @@ -277,14 +330,14 @@ class DateDayVectorSetter extends BaseVectorSetter { } @Override - protected void setValueInternal(DateDayVector vector, Date value) { + protected void setValueInternal(DateDayVector vector, int index, Date value) { long daysSinceEpoch = 
value.toLocalDate().toEpochDay(); - vector.setSafe(0, (int) daysSinceEpoch); + vector.setSafe(index, (int) daysSinceEpoch); } @Override - protected void setNullValue(DateDayVector vector) { - vector.setNull(0); + protected void setNullValue(DateDayVector vector, int index) { + vector.setNull(index); } } @@ -298,18 +351,18 @@ class TimeMicroVectorSetter extends BaseVectorSetter { } @Override - protected void setValueInternal(TimeMicroVector vector, Time value) { + protected void setValueInternal(TimeMicroVector vector, int index, Time value) { LocalDateTime localDateTime = new Timestamp(value.getTime()).toLocalDateTime(); localDateTime = adjustForCalendar(localDateTime, calendar, TimeZone.getTimeZone("UTC")); long midnightMillis = localDateTime.toLocalTime().toNanoOfDay() / 1_000_000; long microsecondsSinceMidnight = millisToMicrosecondsSinceMidnight(midnightMillis); - vector.setSafe(0, microsecondsSinceMidnight); + vector.setSafe(index, microsecondsSinceMidnight); } @Override - protected void setNullValue(TimeMicroVector vector) { - vector.setNull(0); + protected void setNullValue(TimeMicroVector vector, int index) { + vector.setNull(index); } } @@ -329,15 +382,15 @@ class TimeStampMicroVectorSetter extends BaseVectorSetter { +/** Setter implementation for TinyIntVector. Range-checks before narrowing to byte. 
*/ +class TinyIntVectorSetter extends BaseVectorSetter { TinyIntVectorSetter() { - super(Byte.class); + super(Number.class); } @Override - protected void setValueInternal(TinyIntVector vector, Byte value) { - vector.setSafe(0, value); + protected void setValueInternal(TinyIntVector vector, int index, Number value) { + long lv = value.longValue(); + if (lv < Byte.MIN_VALUE || lv > Byte.MAX_VALUE) { + throw new IllegalArgumentException("Value " + lv + " is out of range for INT8"); + } + vector.setSafe(index, value.byteValue()); } @Override - protected void setNullValue(TinyIntVector vector) { - vector.setNull(0); + protected void setNullValue(TinyIntVector vector, int index) { + vector.setNull(index); } } diff --git a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/util/Constants.java b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/util/Constants.java index d08fcf47..c6c69ac6 100644 --- a/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/util/Constants.java +++ b/jdbc-core/src/main/java/com/salesforce/datacloud/jdbc/util/Constants.java @@ -9,6 +9,7 @@ public final class Constants { public static final String INTEGER = "INTEGER"; public static final String TEXT = "TEXT"; public static final String SHORT = "SHORT"; + public static final String BOOL = "BOOL"; // Date Time constants public static final String ISO_TIME_FORMAT = "HH:mm:ss"; diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/ArrowStreamReaderCursorTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/ArrowStreamReaderCursorTest.java index 287db7e0..4a2f94d7 100644 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/ArrowStreamReaderCursorTest.java +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/ArrowStreamReaderCursorTest.java @@ -5,17 +5,21 @@ package com.salesforce.datacloud.jdbc.core; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import 
static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.IOException; import java.time.ZoneId; import java.util.Collections; import java.util.stream.IntStream; import lombok.SneakyThrows; import lombok.val; +import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.VectorSchemaRoot; import org.apache.arrow.vector.ipc.ArrowStreamReader; @@ -37,12 +41,39 @@ class ArrowStreamReaderCursorTest { @Mock protected VectorSchemaRoot root; + @Mock + protected BufferAllocator allocator; + @Test @SneakyThrows - void closesTheReader() { - val sut = new ArrowStreamReaderCursor(reader, ZoneId.systemDefault()); + void closesReaderAndAllocator() { + val sut = new ArrowStreamReaderCursor(reader, allocator, ZoneId.systemDefault()); sut.close(); verify(reader, times(1)).close(); + verify(allocator, times(1)).close(); + } + + /** + * When both reader.close() and allocator.close() throw, the cursor must close the allocator + * even after the reader's close raised, and surface the reader's exception as primary with + * the allocator's exception attached as suppressed. The reader exception is the + * diagnostically interesting one (the leak detector firing on allocator.close is usually a + * symptom of the reader's failure to release buffers); plain try/finally would silently + * replace it with the allocator exception. 
+ */ + @Test + @SneakyThrows + void closeAttachesAllocatorErrorAsSuppressedWhenReaderCloseAlsoThrows() { + val readerError = new IOException("reader close failed"); + val allocatorError = new IllegalStateException("allocator leak detected"); + doThrow(readerError).when(reader).close(); + doThrow(allocatorError).when(allocator).close(); + + val sut = new ArrowStreamReaderCursor(reader, allocator, ZoneId.systemDefault()); + + assertThatThrownBy(sut::close).isSameAs(readerError).hasSuppressedException(allocatorError); + verify(reader, times(1)).close(); + verify(allocator, times(1)).close(); } @Test @@ -56,9 +87,13 @@ void incrementsInternalIndexUntilRowsExhaustedThenLoadsNextBatch() { when(reader.loadNextBatch()).thenReturn(false); when(root.getRowCount()).thenReturn(times); - val sut = new ArrowStreamReaderCursor(reader, ZoneId.systemDefault()); + val sut = new ArrowStreamReaderCursor(reader, allocator, ZoneId.systemDefault()); IntStream.range(0, times + 1).forEach(i -> sut.next()); + // Each next() inspects rowCount once on the per-batch index check. loadNextNonEmptyBatch + // is reached on the (times+1)-th call but only inspects rowCount inside its loop body if + // loadNextBatch returns true; here it returns false, so getRowCount is observed times+1 + // times in total. 
verify(root, times(times + 1)).getRowCount(); verify(reader, times(1)).loadNextBatch(); } @@ -69,7 +104,7 @@ void firstNextReturnsTrueWhenInitialBatchHasRows() { when(root.getRowCount()).thenReturn(1); when(reader.getVectorSchemaRoot()).thenReturn(root); - val sut = new ArrowStreamReaderCursor(reader, ZoneId.systemDefault()); + val sut = new ArrowStreamReaderCursor(reader, allocator, ZoneId.systemDefault()); assertThat(sut.next()).isTrue(); } @@ -81,7 +116,7 @@ void firstNextReturnsFalseWhenStreamHasNoBatches() { when(reader.getVectorSchemaRoot()).thenReturn(root); when(reader.loadNextBatch()).thenReturn(false); - val sut = new ArrowStreamReaderCursor(reader, ZoneId.systemDefault()); + val sut = new ArrowStreamReaderCursor(reader, allocator, ZoneId.systemDefault()); assertThat(sut.next()).isFalse(); } @@ -118,7 +153,7 @@ void skipsZeroRowBatchAndYieldsSubsequentNonEmptyRows() { try (RootAllocator readAlloc = new RootAllocator(Long.MAX_VALUE); ArrowStreamReader streamReader = new ArrowStreamReader(new ByteArrayInputStream(ipc), readAlloc)) { - val sut = new ArrowStreamReaderCursor(streamReader, ZoneId.systemDefault()); + val sut = new ArrowStreamReaderCursor(streamReader, readAlloc, ZoneId.systemDefault()); assertThat(sut.next()) .as("skips zero-row batch, advances to row in second batch") @@ -157,7 +192,7 @@ void zeroRowOnlyBatchYieldsNoRows() { try (RootAllocator readAlloc = new RootAllocator(Long.MAX_VALUE); ArrowStreamReader streamReader = new ArrowStreamReader(new ByteArrayInputStream(ipc), readAlloc)) { - val sut = new ArrowStreamReaderCursor(streamReader, ZoneId.systemDefault()); + val sut = new ArrowStreamReaderCursor(streamReader, readAlloc, ZoneId.systemDefault()); assertThat(sut.next()).isFalse(); } } diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/AsyncStreamingResultSetTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/AsyncDataCloudResultSetTest.java similarity index 97% rename from 
jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/AsyncStreamingResultSetTest.java rename to jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/AsyncDataCloudResultSetTest.java index b0e43bc5..56a52133 100644 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/AsyncStreamingResultSetTest.java +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/AsyncDataCloudResultSetTest.java @@ -22,7 +22,7 @@ import org.junit.jupiter.api.extension.ExtendWith; @ExtendWith(LocalHyperTestBase.class) -public class AsyncStreamingResultSetTest { +public class AsyncDataCloudResultSetTest { private static final int size = 64; private static final String sql = String.format( @@ -55,7 +55,7 @@ public void testNoDataIsLostAsync() { val rs = statement.getResultSet(); assertThat(status.allResultsProduced()).isTrue(); - assertThat(rs).isInstanceOf(StreamingResultSet.class); + assertThat(rs).isInstanceOf(DataCloudResultSet.class); val expected = new AtomicInteger(0); diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudDatabaseMetadataTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudDatabaseMetadataTest.java index 15f2d02b..8231b63c 100644 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudDatabaseMetadataTest.java +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudDatabaseMetadataTest.java @@ -1082,9 +1082,14 @@ public void testTestTest() throws SQLException { ResultSet columnResultSet = QueryMetadataUtil.createColumnResultSet( StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, connection); while (columnResultSet.next()) { + // The metadata result set is Arrow-backed; TYPE_NAME carries "TEXT" (preserved from + // the JDBC-spec MetadataSchemas override), while TYPE_NAME's *value* is the HyperType's + // JDBC name ("VARCHAR" for varchar columns). 
assertThat(columnResultSet.getString("TYPE_NAME")).isEqualTo("VARCHAR"); assertThat(columnResultSet.getInt("DATA_TYPE")).isEqualTo(12); - assertThat(columnResultSet.getBoolean("NULLABLE")).isFalse(); + // NULLABLE is an INTEGER column. Arrow-backed getInt reports the nullability enum; + // 0 (columnNoNulls) for NOT NULL rows, which coerces to false via long→boolean. + assertThat(columnResultSet.getInt("NULLABLE")).isEqualTo(0); assertThat(columnResultSet.getInt("ORDINAL_POSITION")).isEqualTo(ordinalValue); assertThat(columnResultSet.getByte("ORDINAL_POSITION")).isEqualTo(ordinalValue.byteValue()); } @@ -1114,6 +1119,7 @@ public void testMetadataColumnAccessors() throws SQLException { ResultSet columnResultSet = QueryMetadataUtil.createColumnResultSet( StringUtils.EMPTY, StringUtils.EMPTY, StringUtils.EMPTY, connection); while (columnResultSet.next()) { + // Integer accessor widens to all numeric getters. assertThat(columnResultSet.getDouble("DATA_TYPE")).isEqualTo(12); assertThat(columnResultSet.getShort("DATA_TYPE")).isEqualTo(new Short("12")); assertThat(columnResultSet.getFloat("DATA_TYPE")).isEqualTo(12); @@ -1125,19 +1131,25 @@ public void testMetadataColumnAccessors() throws SQLException { assertThat(columnResultSet.getObject("DATA_TYPE", new HashMap<>())).isEqualTo(12); assertThat(columnResultSet.getObject("TYPE_NAME", String.class)).isEqualTo("VARCHAR"); + // Requesting an Integer column as Boolean is not supported by the Arrow int accessor. assertThrows(SQLException.class, () -> columnResultSet.getObject("ORDINAL_POSITION", Boolean.class)); + // Numeric accessors on a VARCHAR column are not supported by the Arrow varchar + // accessor — they throw SQLFeatureNotSupportedException (a SQLException). 
assertThrows(SQLException.class, () -> columnResultSet.getBigDecimal("TYPE_NAME")); assertThrows(SQLException.class, () -> columnResultSet.getDouble("TYPE_NAME")); assertThrows(SQLException.class, () -> columnResultSet.getLong("TYPE_NAME")); assertThrows(SQLException.class, () -> columnResultSet.getInt("TYPE_NAME")); - assertThrows(SQLException.class, () -> columnResultSet.getByte("ORDINAL_POSITION")); + // getByte on an int column is supported by BaseIntVectorAccessor, so this should NOT + // throw — remove the expectation. + assertThat(columnResultSet.getByte("ORDINAL_POSITION")).isEqualTo(ordinalValue.byteValue()); - assertThrows(UnsupportedOperationException.class, () -> columnResultSet.getDate("ORDINAL_POSITION")); - assertThrows(UnsupportedOperationException.class, () -> columnResultSet.getTimestamp("ORDINAL_POSITION")); - assertThrows(UnsupportedOperationException.class, () -> columnResultSet.getTime("ORDINAL_POSITION")); - assertThrows(UnsupportedOperationException.class, () -> columnResultSet.getDate("ORDINAL_POSITION", null)); - assertThrows(UnsupportedOperationException.class, () -> columnResultSet.getTimestamp("ORDINAL_POSITION")); - assertThrows(UnsupportedOperationException.class, () -> columnResultSet.getTime("ORDINAL_POSITION", null)); + // Date/Time getters on an integer column are not supported by the Arrow int accessor. 
+ assertThrows(SQLException.class, () -> columnResultSet.getDate("ORDINAL_POSITION")); + assertThrows(SQLException.class, () -> columnResultSet.getTimestamp("ORDINAL_POSITION")); + assertThrows(SQLException.class, () -> columnResultSet.getTime("ORDINAL_POSITION")); + assertThrows(SQLException.class, () -> columnResultSet.getDate("ORDINAL_POSITION", null)); + assertThrows(SQLException.class, () -> columnResultSet.getTimestamp("ORDINAL_POSITION")); + assertThrows(SQLException.class, () -> columnResultSet.getTime("ORDINAL_POSITION", null)); assertThrows(SQLFeatureNotSupportedException.class, () -> columnResultSet.getBlob("ORDINAL_POSITION")); assertThrows(SQLFeatureNotSupportedException.class, () -> columnResultSet.getClob("ORDINAL_POSITION")); @@ -1239,6 +1251,12 @@ public void testGetTypeInfo() throws SQLException { int dataType = resultSet.getInt("DATA_TYPE"); assertThat(dataType).isNotEqualTo(0); assertThat(resultSet.getShort("NULLABLE")).isEqualTo((short) java.sql.DatabaseMetaData.typeNullable); + // The four boolean columns are now declared as BIT/BOOLEAN per JDBC spec, so + // getBoolean returns the actual flag instead of throwing on the old VARCHAR path. + resultSet.getBoolean("CASE_SENSITIVE"); + resultSet.getBoolean("UNSIGNED_ATTRIBUTE"); + resultSet.getBoolean("FIXED_PREC_SCALE"); + resultSet.getBoolean("AUTO_INCREMENT"); } assertThat(rowCount) .as("getTypeInfo should return one row per HyperTypeKind") diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudMetadataResultSetTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudMetadataResultSetTest.java deleted file mode 100644 index 7b171175..00000000 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudMetadataResultSetTest.java +++ /dev/null @@ -1,102 +0,0 @@ -/** - * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the - * Apache 2.0 license. 
See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt - */ -package com.salesforce.datacloud.jdbc.core; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.*; - -import com.salesforce.datacloud.jdbc.core.metadata.DataCloudResultSetMetaData; -import com.salesforce.datacloud.jdbc.core.resultset.SimpleResultSet; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; -import lombok.SneakyThrows; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class DataCloudMetadataResultSetTest { - DataCloudMetadataResultSet dataCloudMetadataResultSet; - - @BeforeEach - public void init() throws SQLException { - dataCloudMetadataResultSet = - DataCloudMetadataResultSet.of(new DataCloudResultSetMetaData(MetadataSchemas.COLUMNS), null); - } - - @Test - void getRow() throws SQLException { - assertThat(dataCloudMetadataResultSet.getRow()).isEqualTo(0); - - dataCloudMetadataResultSet.close(); - assertThrows(SQLException.class, () -> dataCloudMetadataResultSet.next()); - } - - @Test - void next() throws SQLException { - dataCloudMetadataResultSet.close(); - assertThrows(SQLException.class, () -> dataCloudMetadataResultSet.next()); - } - - @Test - void isClosed() throws SQLException { - assertFalse(dataCloudMetadataResultSet.isClosed()); - dataCloudMetadataResultSet.close(); - assertTrue(dataCloudMetadataResultSet.isClosed()); - } - - @Test - void getStatement() throws SQLException { - assertThat(dataCloudMetadataResultSet.getStatement()).isNull(); - } - - @Test - void unwrap() throws SQLException { - assertThat(dataCloudMetadataResultSet.unwrap(ResultSetMetaData.class)).isNull(); - } - - @Test - void isWrapperFor() throws SQLException { - assertThat(dataCloudMetadataResultSet.isWrapperFor(SimpleResultSet.class)) - .isFalse(); - } - - @Test - void getHoldability() throws SQLException { - 
assertThat(dataCloudMetadataResultSet.getHoldability()).isEqualTo(ResultSet.HOLD_CURSORS_OVER_COMMIT); - } - - @Test - void getFetchSize() throws SQLException { - assertThat(dataCloudMetadataResultSet.getFetchSize()).isEqualTo(0); - } - - @Test - void setFetchSize() { - assertThrows(SQLFeatureNotSupportedException.class, () -> dataCloudMetadataResultSet.setFetchSize(0)); - } - - @SneakyThrows - @Test - void getWarnings() { - assertThat((Iterable) dataCloudMetadataResultSet.getWarnings()) - .isNull(); - } - - @Test - void getConcurrency() throws SQLException { - assertThat(dataCloudMetadataResultSet.getConcurrency()).isEqualTo(ResultSet.CONCUR_READ_ONLY); - } - - @Test - void getType() throws SQLException { - assertThat(dataCloudMetadataResultSet.getType()).isEqualTo(ResultSet.TYPE_FORWARD_ONLY); - } - - @Test - void getFetchDirection() throws SQLException { - assertThat(dataCloudMetadataResultSet.getFetchDirection()).isEqualTo(ResultSet.FETCH_FORWARD); - } -} diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamingResultSetMethodTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSetMethodTest.java similarity index 73% rename from jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamingResultSetMethodTest.java rename to jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSetMethodTest.java index 3d43cf69..89bf729b 100644 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamingResultSetMethodTest.java +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSetMethodTest.java @@ -6,7 +6,11 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; +import com.salesforce.datacloud.jdbc.protocol.QueryResultArrowStream; import 
com.salesforce.datacloud.jdbc.util.RootAllocatorTestExtension; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -14,14 +18,21 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.time.ZoneId; import java.util.Arrays; +import java.util.Collections; import java.util.stream.Stream; import lombok.SneakyThrows; import lombok.val; +import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.VarCharVector; import org.apache.arrow.vector.VectorSchemaRoot; import org.apache.arrow.vector.ipc.ArrowStreamReader; import org.apache.arrow.vector.ipc.ArrowStreamWriter; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.types.pojo.Schema; import org.junit.jupiter.api.Named; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; @@ -29,7 +40,7 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -class StreamingResultSetMethodTest { +class DataCloudResultSetMethodTest { @RegisterExtension static RootAllocatorTestExtension ext = new RootAllocatorTestExtension(); @@ -37,19 +48,22 @@ class StreamingResultSetMethodTest { private static final String QUERY_ID = "test-query-id"; @SneakyThrows - private StreamingResultSet createResultSet() { + private DataCloudResultSet createResultSet() { return createSingleVarCharResultSet(false); } @SneakyThrows - private StreamingResultSet createResultSetWithNullValue() { + private DataCloudResultSet createResultSetWithNullValue() { return createSingleVarCharResultSet(true); } @SneakyThrows - private StreamingResultSet createSingleVarCharResultSet(boolean nullValue) { - val allocator = ext.getRootAllocator(); - val vector = new VarCharVector("col1", allocator); + private DataCloudResultSet 
createSingleVarCharResultSet(boolean nullValue) { + // Build a single-row VARCHAR batch, serialise to IPC bytes, and wrap in an + // ArrowStreamReader. Using a fresh RootAllocator so the result set owns its own + // allocator lifecycle (independent of the shared test extension allocator). + val writeAllocator = ext.getRootAllocator(); + val vector = new VarCharVector("col1", writeAllocator); vector.allocateNew(); if (nullValue) { vector.setNull(0); @@ -58,24 +72,27 @@ private StreamingResultSet createSingleVarCharResultSet(boolean nullValue) { } vector.setValueCount(1); - val root = new VectorSchemaRoot(Arrays.asList(vector.getField()), Arrays.asList(vector)); - root.setRowCount(1); - val out = new ByteArrayOutputStream(); - try (val writer = new ArrowStreamWriter(root, null, out)) { - writer.writeBatch(); + try (VectorSchemaRoot root = new VectorSchemaRoot(Arrays.asList(vector.getField()), Arrays.asList(vector))) { + root.setRowCount(1); + try (ArrowStreamWriter writer = new ArrowStreamWriter(root, null, out)) { + writer.start(); + writer.writeBatch(); + writer.end(); + } } - root.close(); - val reader = new ArrowStreamReader(new ByteArrayInputStream(out.toByteArray()), allocator); - return StreamingResultSet.of(reader, QUERY_ID); + RootAllocator readerAllocator = new RootAllocator(Long.MAX_VALUE); + ArrowStreamReader reader = new ArrowStreamReader(new ByteArrayInputStream(out.toByteArray()), readerAllocator); + return DataCloudResultSet.of( + new QueryResultArrowStream.Result(reader, readerAllocator), QUERY_ID, ZoneId.systemDefault()); } // --- Unsupported methods --- @FunctionalInterface interface ResultSetMethod { - void invoke(StreamingResultSet rs) throws SQLException; + void invoke(DataCloudResultSet rs) throws SQLException; } static Stream unsupportedMethods() { @@ -93,7 +110,7 @@ static Stream unsupportedMethods() { Arguments.of(Named.of("getSQLXML", (ResultSetMethod) rs -> rs.getSQLXML(1))), Arguments.of(Named.of("getNString", (ResultSetMethod) rs -> 
rs.getNString(1))), Arguments.of(Named.of("getNCharacterStream", (ResultSetMethod) rs -> rs.getNCharacterStream(1))), - Arguments.of(Named.of("getCursorName", (ResultSetMethod) StreamingResultSet::getCursorName))); + Arguments.of(Named.of("getCursorName", (ResultSetMethod) DataCloudResultSet::getCursorName))); } @ParameterizedTest @@ -129,6 +146,37 @@ void getAccessorThrowsOnTooLargeIndex() throws Exception { // --- Lifecycle and navigation --- + @Test + @SneakyThrows + void ofClosingOnFailureClosesAllocatorWhenSchemaIsUnsupported() { + // Build an Arrow IPC stream containing one column of LargeUtf8, which + // ArrowToHyperTypeMapper does not model — DataCloudResultSet.of will throw SQLException. + // Without the leak fix, the RootAllocator passed in would never be closed. + val unsupportedField = new Field("col", new FieldType(true, new ArrowType.LargeUtf8(), null), null); + val schema = new Schema(Collections.singletonList(unsupportedField)); + val out = new ByteArrayOutputStream(); + try (RootAllocator writeAllocator = new RootAllocator(Long.MAX_VALUE); + VectorSchemaRoot root = VectorSchemaRoot.create(schema, writeAllocator)) { + root.setRowCount(0); + try (ArrowStreamWriter writer = new ArrowStreamWriter(root, null, out)) { + writer.start(); + writer.end(); + } + } + + val readerAllocator = spy(new RootAllocator(Long.MAX_VALUE)); + val reader = spy(new ArrowStreamReader(new ByteArrayInputStream(out.toByteArray()), readerAllocator)); + val arrowStream = new QueryResultArrowStream.Result(reader, readerAllocator); + + assertThatThrownBy(() -> DataCloudResultSet.of(arrowStream, QUERY_ID, ZoneId.systemDefault())) + .isInstanceOf(SQLException.class) + .hasMessageContaining("Unsupported column type"); + + // The leak fix must close both the reader and the allocator before re-throwing. 
+ verify(reader, atLeastOnce()).close(); + verify(readerAllocator, atLeastOnce()).close(); + } + @Test void closeAndIsClosed() throws Exception { val rs = createResultSet(); @@ -216,6 +264,18 @@ void getObjectWithSupertypeOrInterfaceReturnsValue() throws Exception { } } + @Test + void getObjectWithNullTypeMapBehavesLikeGetObject() throws Exception { + // JDBC: getObject(int, Map) with a null/empty type map should behave like getObject(int). + try (val rs = createResultSet()) { + rs.next(); + val plain = rs.getObject(1); + assertThat(rs.getObject(1, (java.util.Map>) null)).isEqualTo(plain); + assertThat(rs.getObject(1, java.util.Collections.>emptyMap())) + .isEqualTo(plain); + } + } + @Test void queryId() throws Exception { try (val rs = createResultSet()) { @@ -334,11 +394,11 @@ void getWarningsReturnsNull() throws Exception { @Test void unwrapAndIsWrapperFor() throws Exception { try (val rs = createResultSet()) { - assertThat(rs.isWrapperFor(StreamingResultSet.class)).isTrue(); + assertThat(rs.isWrapperFor(DataCloudResultSet.class)).isTrue(); assertThat(rs.isWrapperFor(DataCloudResultSet.class)).isTrue(); assertThat(rs.isWrapperFor(String.class)).isFalse(); - assertThat(rs.unwrap(StreamingResultSet.class)).isSameAs(rs); + assertThat(rs.unwrap(DataCloudResultSet.class)).isSameAs(rs); assertThatThrownBy(() -> rs.unwrap(String.class)) .isInstanceOf(SQLException.class) .hasMessageContaining("Cannot unwrap"); diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamingResultSetTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSetTest.java similarity index 84% rename from jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamingResultSetTest.java rename to jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSetTest.java index 6a8b1afb..a3c6518e 100644 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamingResultSetTest.java +++ 
b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/DataCloudResultSetTest.java @@ -23,7 +23,7 @@ @Slf4j @ExtendWith(LocalHyperTestBase.class) -public class StreamingResultSetTest { +public class DataCloudResultSetTest { public static String query(String arg) { return String.format( "select cast(a as numeric(38,18)) a, cast(a as numeric(38,18)) b, cast(a as numeric(38,18)) c from generate_series(1, %s) as s(a) order by a asc", @@ -95,7 +95,7 @@ private void withPrepared(String sql, ThrowingBiConsumer TYPE_INFO_NAMES = Arrays.asList( + "TYPE_NAME", + "DATA_TYPE", + "PRECISION", + "LITERAL_PREFIX", + "LITERAL_SUFFIX", + "CREATE_PARAMS", + "NULLABLE", + "CASE_SENSITIVE", + "SEARCHABLE", + "UNSIGNED_ATTRIBUTE", + "FIXED_PREC_SCALE", + "AUTO_INCREMENT", + "LOCAL_TYPE_NAME", + "MINIMUM_SCALE", + "MAXIMUM_SCALE", + "SQL_DATA_TYPE", + "SQL_DATETIME_SUB", + "NUM_PREC_RADIX"); + + private static final List TYPE_INFO_TYPES = Arrays.asList( + "TEXT", "INTEGER", "INTEGER", "TEXT", "TEXT", "TEXT", "SHORT", "BOOL", "SHORT", "BOOL", "BOOL", "BOOL", + "TEXT", "SHORT", "SHORT", "INTEGER", "INTEGER", "INTEGER"); + + private static final List TYPE_INFO_TYPE_IDS = Arrays.asList( + Types.VARCHAR, + Types.INTEGER, + Types.INTEGER, + Types.VARCHAR, + Types.VARCHAR, + Types.VARCHAR, + Types.SMALLINT, + Types.BOOLEAN, + Types.SMALLINT, + Types.BOOLEAN, + Types.BOOLEAN, + Types.BOOLEAN, + Types.VARCHAR, + Types.SMALLINT, + Types.SMALLINT, + Types.INTEGER, + Types.INTEGER, + Types.INTEGER); + @Test void columnsSchemaHasExpectedNames() { List names = @@ -100,4 +144,33 @@ void columnsSchemaHasExpectedJdbcTypeIds() { assertThat(typeIds).hasSize(24); assertThat(typeIds.get(0)).isEqualTo(Types.VARCHAR); } + + @Test + void typeInfoSchemaHasExpectedNames() { + List names = + MetadataSchemas.TYPE_INFO.stream().map(ColumnMetadata::getName).collect(Collectors.toList()); + assertThat(names).isEqualTo(TYPE_INFO_NAMES); + assertThat(names).hasSize(18); + 
assertThat(names.get(0)).isEqualTo("TYPE_NAME"); + } + + @Test + void typeInfoSchemaHasExpectedTypeNames() { + List typeNames = MetadataSchemas.TYPE_INFO.stream() + .map(ColumnMetadata::getTypeName) + .collect(Collectors.toList()); + assertThat(typeNames).isEqualTo(TYPE_INFO_TYPES); + assertThat(typeNames).hasSize(18); + assertThat(typeNames.get(7)).isEqualTo("BOOL"); + } + + @Test + void typeInfoSchemaHasExpectedJdbcTypeIds() { + List typeIds = MetadataSchemas.TYPE_INFO.stream() + .map(c -> HyperTypes.toJdbcTypeCode(c.getType())) + .collect(Collectors.toList()); + assertThat(typeIds).isEqualTo(TYPE_INFO_TYPE_IDS); + assertThat(typeIds).hasSize(18); + assertThat(typeIds.get(7)).isEqualTo(Types.BOOLEAN); + } } diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamCloseTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamCloseTest.java index 4d626456..57a1f4aa 100644 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamCloseTest.java +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/StreamCloseTest.java @@ -38,16 +38,16 @@ public class StreamCloseTest { /** - * Verifies that closing a StreamingResultSet triggers close on the underlying gRPC iterator + * Verifies that closing a DataCloudResultSet triggers close on the underlying gRPC iterator * through the full Arrow close chain: - * StreamingResultSet.close() → ArrowStreamReaderCursor.close() → ArrowStreamReader.close() + * DataCloudResultSet.close() → ArrowStreamReaderCursor.close() → ArrowStreamReader.close() * → ArrowReader.closeReadSource() → MessageChannelReader → ReadChannel * → ByteStringReadableByteChannel.close() → SQLExceptionQueryResultIterator.close() * → QueryResultIterator.close() → AsyncStreamObserver.close() → gRPC stream cancel. * *

The test wraps a QueryResultIterator in a close-tracking decorator, passes it through the * standard driver path (SQLExceptionQueryResultIterator → QueryResultArrowStream → - * ByteStringReadableByteChannel → ArrowStreamReader → StreamingResultSet), then verifies that + * ByteStringReadableByteChannel → ArrowStreamReader → DataCloudResultSet), then verifies that * closing the ResultSet propagates all the way down to the iterator.

*/ @Test @@ -81,7 +81,7 @@ void closingResultSetClosesUnderlyingIterator() { // ByteStringReadableByteChannel(iterator, resource) → ArrowStreamReader val arrowStream = SQLExceptionQueryResultIterator.createSqlExceptionArrowStreamReader( tracked, false, "test-query", null); - val resultSet = StreamingResultSet.of(arrowStream, "test-query"); + val resultSet = DataCloudResultSet.of(arrowStream, "test-query", java.time.ZoneId.systemDefault()); // Read one row — stream is still open with remaining rows assertThat(resultSet.next()).isTrue(); diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/metadata/DataCloudResultSetMetaDataTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/metadata/DataCloudResultSetMetaDataTest.java index d8e1f1ba..f83ba615 100644 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/metadata/DataCloudResultSetMetaDataTest.java +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/metadata/DataCloudResultSetMetaDataTest.java @@ -6,7 +6,6 @@ import static org.assertj.core.api.Assertions.assertThat; -import com.salesforce.datacloud.jdbc.core.DataCloudMetadataResultSet; import com.salesforce.datacloud.jdbc.core.MetadataSchemas; import com.salesforce.datacloud.jdbc.protocol.data.ColumnMetadata; import com.salesforce.datacloud.jdbc.protocol.data.HyperType; @@ -24,9 +23,7 @@ class DataCloudResultSetMetaDataTest { @BeforeEach public void init() throws SQLException { - DataCloudMetadataResultSet dataCloudMetadataResultSet = - DataCloudMetadataResultSet.of(new DataCloudResultSetMetaData(COLUMNS_SCHEMA), null); - resultSetMetaData = dataCloudMetadataResultSet.getMetaData(); + resultSetMetaData = new DataCloudResultSetMetaData(COLUMNS_SCHEMA); } @Test diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/metadata/MetadataResultSetsTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/metadata/MetadataResultSetsTest.java new file mode 100644 index 00000000..5bf407b1 
--- /dev/null +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/metadata/MetadataResultSetsTest.java @@ -0,0 +1,78 @@ +/** + * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the + * Apache 2.0 license. See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt + */ +package com.salesforce.datacloud.jdbc.core.metadata; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.salesforce.datacloud.jdbc.protocol.data.ColumnMetadata; +import com.salesforce.datacloud.jdbc.protocol.data.HyperType; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import lombok.val; +import org.junit.jupiter.api.Test; + +/** + * Tests the {@link MetadataResultSets#of} arity contract: rows must match the schema column + * count; null rows are allowed as the all-nulls shape (matching the legacy {@code coerceRows} + * convention). Generic JDBC {@link java.sql.ResultSet} shape (closeable, forward-only, + * holdability, etc.) is exercised by {@code DataCloudResultSetMethodTest} since metadata + * result sets share the {@link com.salesforce.datacloud.jdbc.core.DataCloudResultSet} plumbing. 
+ */ +class MetadataResultSetsTest { + + private static final List THREE_COLUMNS = Arrays.asList( + new ColumnMetadata("a", HyperType.varcharUnlimited(true)), + new ColumnMetadata("b", HyperType.int32(true)), + new ColumnMetadata("c", HyperType.bool(true))); + + @Test + void shortRowRejected() { + val rows = Collections.singletonList(Arrays.asList("only-one")); + assertThatThrownBy(() -> MetadataResultSets.of(THREE_COLUMNS, rows)) + .hasMessageContaining("3 columns") + .hasMessageContaining("1 elements"); + } + + @Test + void longRowRejected() { + val rows = Collections.singletonList(Arrays.asList("a", 1, true, "extra")); + assertThatThrownBy(() -> MetadataResultSets.of(THREE_COLUMNS, rows)) + .hasMessageContaining("3 columns") + .hasMessageContaining("4 elements"); + } + + @Test + void rightArityAccepted() throws Exception { + val rows = Collections.singletonList(Arrays.asList("a", 1, true)); + try (val rs = MetadataResultSets.of(THREE_COLUMNS, rows)) { + assertThat(rs.next()).isTrue(); + assertThat(rs.getString(1)).isEqualTo("a"); + assertThat(rs.getInt(2)).isEqualTo(1); + assertThat(rs.getBoolean(3)).isTrue(); + } + } + + @Test + void nullRowAcceptedAsAllNulls() throws Exception { + val rows = Collections.>singletonList(null); + try (val rs = MetadataResultSets.of(THREE_COLUMNS, rows)) { + assertThat(rs.next()).isTrue(); + assertThat(rs.getString(1)).isNull(); + rs.getInt(2); + assertThat(rs.wasNull()).isTrue(); + rs.getBoolean(3); + assertThat(rs.wasNull()).isTrue(); + } + } + + @Test + void emptyRowsAccepted() throws Exception { + try (val rs = MetadataResultSets.of(THREE_COLUMNS, Collections.emptyList())) { + assertThat(rs.next()).isFalse(); + } + } +} diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/resultset/ColumnAccessorTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/resultset/ColumnAccessorTest.java deleted file mode 100644 index d919154b..00000000 --- 
a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/resultset/ColumnAccessorTest.java +++ /dev/null @@ -1,31 +0,0 @@ -/** - * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the - * Apache 2.0 license. See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt - */ -package com.salesforce.datacloud.jdbc.core.resultset; - -import static org.junit.jupiter.api.Assertions.*; - -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -class ColumnAccessorTest { - - @Test - public void shouldThrowUnsupportedError() { - ColumnAccessor columnAccessor = Mockito.mock(ColumnAccessor.class, Mockito.CALLS_REAL_METHODS); - SimpleResultSet resultSet = Mockito.mock(SimpleResultSet.class, Mockito.CALLS_REAL_METHODS); - - // Test methods from SimpleResultSet that throw SQLFeatureNotSupportedException - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getBoolean(resultSet)); - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getAnyInteger(resultSet)); - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getBigDecimal(resultSet)); - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getAnyFloatingPoint(resultSet)); - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getString(resultSet)); - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getBytes(resultSet)); - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getDate(resultSet)); - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getTime(resultSet)); - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getTimestamp(resultSet)); - assertThrows(UnsupportedOperationException.class, () -> columnAccessor.getArray(resultSet)); - } -} diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/resultset/SimpleResultSetTest.java 
b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/resultset/SimpleResultSetTest.java deleted file mode 100644 index 02198bb0..00000000 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/core/resultset/SimpleResultSetTest.java +++ /dev/null @@ -1,462 +0,0 @@ -/** - * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the - * Apache 2.0 license. See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt - */ -package com.salesforce.datacloud.jdbc.core.resultset; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.salesforce.datacloud.jdbc.core.DataCloudMetadataResultSet; -import com.salesforce.datacloud.jdbc.core.MetadataSchemas; -import com.salesforce.datacloud.jdbc.core.metadata.DataCloudResultSetMetaData; -import com.salesforce.datacloud.jdbc.protocol.data.ColumnMetadata; -import com.salesforce.datacloud.jdbc.protocol.data.HyperType; -import java.io.InputStream; -import java.io.Reader; -import java.math.BigDecimal; -import java.sql.Blob; -import java.sql.Clob; -import java.sql.NClob; -import java.sql.Ref; -import java.sql.ResultSet; -import java.sql.RowId; -import java.sql.SQLException; -import java.sql.SQLFeatureNotSupportedException; -import java.sql.SQLXML; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.function.Executable; -import org.mockito.Mockito; - -class SimpleResultSetTest { - - @Test - public void shouldThrowUnsupportedError() { - SimpleResultSet resultSet = Mockito.mock(SimpleResultSet.class, Mockito.CALLS_REAL_METHODS); - - // Test 
methods from SimpleResultSet that throw SQLFeatureNotSupportedException - assertThrows(SQLFeatureNotSupportedException.class, resultSet::clearWarnings); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::getCursorName); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getBlob(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getClob(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getNClob(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getRef(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getURL(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getRowId(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getSQLXML(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getNString(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getNCharacterStream(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getAsciiStream(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getUnicodeStream(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getBinaryStream(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getCharacterStream(1)); - - // Test un-implemented methods from SimpleResultSet that throw UnsupportedOperationException - assertThrows(UnsupportedOperationException.class, () -> resultSet.getDate(1)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getTimestamp(1)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getTime(1)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getDate(1, null)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getTimestamp(1, null)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getTime(1, null)); - 
assertThrows(UnsupportedOperationException.class, () -> resultSet.getBigDecimal(1, 1)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getBytes(1)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getArray(1)); - - // Test methods from ForwardOnlyResultSet interface - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.setFetchDirection(ResultSet.FETCH_REVERSE)); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::isBeforeFirst); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::isAfterLast); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::isFirst); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::isLast); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::first); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::last); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.absolute(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.relative(1)); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::previous); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::beforeFirst); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::afterLast); - - // Test methods from ReadOnlyResultSet interface - assertThrows(SQLFeatureNotSupportedException.class, resultSet::rowUpdated); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::rowInserted); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::rowDeleted); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::insertRow); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::updateRow); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::deleteRow); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::cancelRowUpdates); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::refreshRow); - 
assertThrows(SQLFeatureNotSupportedException.class, resultSet::moveToInsertRow); - assertThrows(SQLFeatureNotSupportedException.class, resultSet::moveToCurrentRow); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateNull(1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateBoolean(1, true)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateByte(1, (byte) 1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateShort(1, (short) 1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateInt(1, 1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateLong(1, 1L)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateFloat(1, 1.0f)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateDouble(1, 1.0)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateBigDecimal(1, BigDecimal.ONE)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateString(1, "test")); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateBytes(1, new byte[0])); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateDate(1, new java.sql.Date(0))); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateTime(1, new java.sql.Time(0))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateTimestamp(1, new java.sql.Timestamp(0))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateAsciiStream(1, Mockito.mock(InputStream.class), 1)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateBinaryStream(1, Mockito.mock(InputStream.class), 1)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateCharacterStream(1, Mockito.mock(Reader.class), 1)); - 
assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateObject(1, new Object(), 1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateObject(1, new Object())); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateNull("col")); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateBoolean("col", true)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateByte("col", (byte) 1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateShort("col", (short) 1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateInt("col", 1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateLong("col", 1L)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateFloat("col", 1.0f)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateDouble("col", 1.0)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateBigDecimal("col", BigDecimal.ONE)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateString("col", "test")); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateBytes("col", new byte[0])); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateDate("col", new java.sql.Date(0))); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateTime("col", new java.sql.Time(0))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateTimestamp("col", new java.sql.Timestamp(0))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateAsciiStream("col", Mockito.mock(InputStream.class), 1)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateBinaryStream("col", Mockito.mock(InputStream.class), 1)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> 
resultSet.updateCharacterStream("col", Mockito.mock(Reader.class), 1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateObject("col", new Object(), 1)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateObject("col", new Object())); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateRef(1, Mockito.mock(Ref.class))); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateRef("col", Mockito.mock(Ref.class))); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateBlob(1, Mockito.mock(Blob.class))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateBlob("col", Mockito.mock(Blob.class))); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateClob(1, Mockito.mock(Clob.class))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateClob("col", Mockito.mock(Clob.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateArray(1, Mockito.mock(java.sql.Array.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateArray("col", Mockito.mock(java.sql.Array.class))); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateRowId(1, Mockito.mock(RowId.class))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateRowId("col", Mockito.mock(RowId.class))); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateNString(1, "test")); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateNString("col", "test")); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateNClob(1, Mockito.mock(NClob.class))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateNClob("col", Mockito.mock(NClob.class))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateSQLXML(1, 
Mockito.mock(SQLXML.class))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateSQLXML("col", Mockito.mock(SQLXML.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateNCharacterStream(1, Mockito.mock(Reader.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateNCharacterStream("col", Mockito.mock(Reader.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateAsciiStream(1, Mockito.mock(InputStream.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateBinaryStream(1, Mockito.mock(InputStream.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateCharacterStream(1, Mockito.mock(Reader.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateAsciiStream("col", Mockito.mock(InputStream.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateBinaryStream("col", Mockito.mock(InputStream.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateCharacterStream("col", Mockito.mock(Reader.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateBlob(1, Mockito.mock(InputStream.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateBlob("col", Mockito.mock(InputStream.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateClob(1, Mockito.mock(Reader.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateClob("col", Mockito.mock(Reader.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateNClob(1, Mockito.mock(Reader.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateNClob("col", 
Mockito.mock(Reader.class), 1L)); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateNCharacterStream(1, Mockito.mock(Reader.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateNCharacterStream("col", Mockito.mock(Reader.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateAsciiStream(1, Mockito.mock(InputStream.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateBinaryStream(1, Mockito.mock(InputStream.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateCharacterStream(1, Mockito.mock(Reader.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateAsciiStream("col", Mockito.mock(InputStream.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateBinaryStream("col", Mockito.mock(InputStream.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateCharacterStream("col", Mockito.mock(Reader.class))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateBlob(1, Mockito.mock(InputStream.class))); - assertThrows( - SQLFeatureNotSupportedException.class, - () -> resultSet.updateBlob("col", Mockito.mock(InputStream.class))); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateClob(1, Mockito.mock(Reader.class))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateClob("col", Mockito.mock(Reader.class))); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.updateNClob(1, Mockito.mock(Reader.class))); - assertThrows( - SQLFeatureNotSupportedException.class, () -> resultSet.updateNClob("col", Mockito.mock(Reader.class))); - } - - @Test - void wasNullReflectsNullAndNonNullColumnValues() throws SQLException { - List data = Arrays.asList(Collections.singletonList("TABLE"), 
Collections.singletonList(null)); - SimpleResultSet resultSet = - DataCloudMetadataResultSet.of(new DataCloudResultSetMetaData(MetadataSchemas.TABLE_TYPES), data); - - assertTrue(resultSet.next()); - assertEquals("TABLE", resultSet.getString(1)); - assertFalse(resultSet.wasNull()); - - assertTrue(resultSet.next()); - assertNull(resultSet.getString(1)); - assertTrue(resultSet.wasNull()); - } - - /** - * Covers branches in SimpleMetadataResultSet's private getValue (invoked via getString/getInt - * etc.): closed result set, no current row, row index out of bounds, row not a List, column - * index out of bounds for row. - */ - @Test - void simpleMetadataResultSetStatusValue() throws SQLException { - // 1. ResultSet is closed - SimpleResultSet rs = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(MetadataSchemas.TABLE_TYPES), - Arrays.asList(Collections.singletonList("x"))); - rs.close(); - SQLException ex = assertThrows(SQLException.class, () -> rs.getString(1)); - assertTrue(ex.getMessage().contains("ResultSet is closed"), "message: " + ex.getMessage()); - - // 2. No current row (get before next()) - SimpleResultSet rs2 = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(MetadataSchemas.TABLE_TYPES), - Arrays.asList(Collections.singletonList("x"))); - ex = assertThrows(SQLException.class, () -> rs2.getString(1)); - assertTrue(ex.getMessage().contains("No current row"), "message: " + ex.getMessage()); - - // 3. Row index out of bounds (past last row) - SimpleResultSet rs3 = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(MetadataSchemas.TABLE_TYPES), - Arrays.asList(Collections.singletonList("x"))); - assertTrue(rs3.next()); - assertFalse(rs3.next()); - ex = assertThrows(SQLException.class, () -> rs3.getString(1)); - assertTrue(ex.getMessage().contains("Row index out of bounds"), "message: " + ex.getMessage()); - - // 4. 
Data row is not a List - SimpleResultSet rs4 = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(MetadataSchemas.TABLE_TYPES), Arrays.asList(123)); - assertTrue(rs4.next()); - ex = assertThrows(SQLException.class, () -> rs4.getString(1)); - assertTrue(ex.getMessage().contains("Data row is not a List"), "message: " + ex.getMessage()); - - // 5. Column index out of bounds for row (row has fewer columns than requested) - SimpleResultSet rs5 = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(MetadataSchemas.COLUMNS), - Arrays.asList(Collections.singletonList("only one"))); - assertTrue(rs5.next()); - ex = assertThrows(SQLException.class, () -> rs5.getString(5)); - assertTrue(ex.getMessage().contains("out of bounds for row"), "message: " + ex.getMessage()); - } - - /** - * Covers the out-of-range branches in SimpleResultSet for getByte, getShort, and getInt: when - * the long value is outside the target type's range, the getter throws SQLException. (getFloat - * uses getDouble and would need a double column to trigger out-of-range; current metadata has - * only integer/varchar columns.) 
- */ - @Test - void getNumericTypesThrowWhenValueOutOfRange() throws SQLException { - // GET_COLUMNS: column 5 (1-based) is DATA_TYPE (INTEGER) - int col = 5; - - SimpleResultSet resultSet = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(MetadataSchemas.COLUMNS), - Arrays.asList( - rowWithLongAt(col, (long) Byte.MAX_VALUE + 1), - rowWithLongAt(col, (long) Byte.MIN_VALUE - 1), - rowWithLongAt(col, (long) Short.MAX_VALUE + 1), - rowWithLongAt(col, (long) Short.MIN_VALUE - 1), - rowWithLongAt(col, (long) Integer.MAX_VALUE + 1), - rowWithLongAt(col, (long) Integer.MIN_VALUE - 1))); - - // getByte: above and below byte range - assertTrue(resultSet.next()); - assertThrowsOutOfRange("byte", () -> resultSet.getByte(col)); - assertTrue(resultSet.next()); - assertThrowsOutOfRange("byte", () -> resultSet.getByte(col)); - - // getShort: above and below short range - assertTrue(resultSet.next()); - assertThrowsOutOfRange("short", () -> resultSet.getShort(col)); - assertTrue(resultSet.next()); - assertThrowsOutOfRange("short", () -> resultSet.getShort(col)); - - // getInt: above and below int range - assertTrue(resultSet.next()); - assertThrowsOutOfRange("int", () -> resultSet.getInt(col)); - assertTrue(resultSet.next()); - assertThrowsOutOfRange("int", () -> resultSet.getInt(col)); - - assertFalse(resultSet.next()); - } - - private static List rowWithLongAt(int oneBasedColumnIndex, long value) { - List row = new ArrayList<>(Collections.nCopies(24, null)); - row.set(oneBasedColumnIndex - 1, value); - return row; - } - - private static void assertThrowsOutOfRange(String typeName, Executable executable) { - SQLException ex = assertThrows(SQLException.class, executable); - String suffix = "byte".equals(typeName) || "short".equals(typeName) ? 
"a " + typeName : "an " + typeName; - assertTrue(ex.getMessage().contains("out of range for " + suffix), "message: " + ex.getMessage()); - } - - /** - * Exercises the label-based getters from {@link ResultSetWithPositionalGetters} so that the - * interface default methods (which delegate via findColumn) are covered. Uses a real - * {@link DataCloudMetadataResultSet} (not a mock) so JaCoCo attributes execution to the - * interface and Codecov reports coverage for ResultSetWithPositionalGetters. - */ - @Test - void resultSetWithPositionalGettersDelegateAndThrow() throws SQLException { - ResultSetWithPositionalGetters resultSet = - DataCloudMetadataResultSet.of(new DataCloudResultSetMetaData(MetadataSchemas.TABLE_TYPES), null); - String col = "TABLE_TYPE"; // single column from GET_TABLE_TYPES - - // UnsupportedOperationException from SimpleResultSet positional implementations - assertThrows(UnsupportedOperationException.class, () -> resultSet.getBytes(col)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getDate(col)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getDate(col, (Calendar) null)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getTime(col)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getTime(col, (Calendar) null)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getTimestamp(col)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getTimestamp(col, (Calendar) null)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getBigDecimal(col, 1)); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getArray(col)); - - // SQLFeatureNotSupportedException from SimpleResultSet positional implementations - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getAsciiStream(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getUnicodeStream(col)); - 
assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getBinaryStream(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getCharacterStream(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getRef(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getBlob(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getClob(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getURL(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getRowId(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getNClob(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getSQLXML(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getNString(col)); - assertThrows(SQLFeatureNotSupportedException.class, () -> resultSet.getNCharacterStream(col)); - - // getObject(String, Map) -> UnsupportedOperationException (non-empty map) - Map> map = Collections.singletonMap("key", String.class); - assertThrows(UnsupportedOperationException.class, () -> resultSet.getObject(col, map)); - - // No current row: delegate to positional getters which throw SQLException - assertThrows(SQLException.class, () -> resultSet.getObject(col)); - assertThrows(SQLException.class, () -> resultSet.getObject(col, String.class)); - assertThrows(SQLException.class, () -> resultSet.getBigDecimal(col)); - } - - @Test - void tinyintColumnSupportsGetLongGetDoubleGetBigDecimal() throws SQLException { - List schema = - Collections.singletonList(new ColumnMetadata("val", HyperType.int8(true), "TINYINT")); - SimpleResultSet rs = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(schema), Arrays.asList(Collections.singletonList(42L))); - - assertTrue(rs.next()); - assertEquals(42L, rs.getLong(1)); - assertFalse(rs.wasNull()); - } - - @Test - void tinyintColumnSupportsGetDouble() throws 
SQLException { - List schema = - Collections.singletonList(new ColumnMetadata("val", HyperType.int8(true), "TINYINT")); - SimpleResultSet rs = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(schema), Arrays.asList(Collections.singletonList(7L))); - - assertTrue(rs.next()); - assertEquals(7.0, rs.getDouble(1)); - } - - @Test - void tinyintColumnSupportsGetBigDecimal() throws SQLException { - List schema = - Collections.singletonList(new ColumnMetadata("val", HyperType.int8(true), "TINYINT")); - SimpleResultSet rs = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(schema), Arrays.asList(Collections.singletonList(99L))); - - assertTrue(rs.next()); - assertEquals(new BigDecimal(99), rs.getBigDecimal(1)); - } - - @Test - void getFloatAcceptsNegativeValuesWithinRange() throws SQLException { - int col = 5; - SimpleResultSet rs = DataCloudMetadataResultSet.of( - new DataCloudResultSetMetaData(MetadataSchemas.COLUMNS), Arrays.asList(rowWithLongAt(col, -42L))); - - assertTrue(rs.next()); - assertEquals(-42.0f, rs.getFloat(col)); - } -} diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/examples/RowBasedPaginationTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/examples/RowBasedPaginationTest.java index 803542d3..87390c83 100644 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/examples/RowBasedPaginationTest.java +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/examples/RowBasedPaginationTest.java @@ -10,7 +10,6 @@ import com.salesforce.datacloud.jdbc.core.DataCloudConnection; import com.salesforce.datacloud.jdbc.core.DataCloudResultSet; import com.salesforce.datacloud.jdbc.core.DataCloudStatement; -import com.salesforce.datacloud.jdbc.core.StreamingResultSet; import com.salesforce.datacloud.jdbc.hyper.HyperServerManager; import com.salesforce.datacloud.jdbc.hyper.HyperServerManager.ConfigFile; import com.salesforce.datacloud.jdbc.hyper.HyperServerProcess; @@ -55,7 +54,7 @@ public void 
testRowBasedPagination() throws SQLException { final DataCloudStatement stmt = conn.createStatement().unwrap(DataCloudStatement.class)) { // Set the initial page size stmt.setResultSetConstraints(pageSize); - final StreamingResultSet rs = stmt.executeQuery(sql).unwrap(StreamingResultSet.class); + final DataCloudResultSet rs = stmt.executeQuery(sql).unwrap(DataCloudResultSet.class); // Save the queryId for retrieving subsequent pages queryId = stmt.getQueryId(); diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/QueryResultArrowStreamTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/QueryResultArrowStreamTest.java index 0049ebad..5affe777 100644 --- a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/QueryResultArrowStreamTest.java +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/QueryResultArrowStreamTest.java @@ -32,8 +32,13 @@ void testArrowStreamWithSimpleSelectQuery() { val queryClient = QueryAccessGrpcClient.of(queryId, stubProvider.getStub()); val chunkIterator = ChunkRangeIterator.of(queryClient, 0, 3, false, QueryResultArrowStream.OUTPUT_FORMAT); - // Create ArrowStreamReader from the iterator - try (val reader = QueryResultArrowStream.toArrowStreamReader(chunkIterator)) { + // Create ArrowStreamReader from the iterator. + // Close order matters: the reader must close before the allocator because + // try-with-resources closes in reverse declaration order, and closing the allocator + // while the reader still holds buffers trips the leak detector. 
+ val arrowStream = QueryResultArrowStream.toArrowStreamReader(chunkIterator); + try (val allocator = arrowStream.getAllocator(); + val reader = arrowStream.getReader()) { int rowCount = 0; // Count all rows in the arrow stream @@ -62,8 +67,13 @@ void testArrowStreamWithNoColumnsQuery() { val queryClient = QueryAccessGrpcClient.of(queryId, stubProvider.getStub()); val chunkIterator = ChunkRangeIterator.of(queryClient, 0, 3, false, QueryResultArrowStream.OUTPUT_FORMAT); - // Create ArrowStreamReader from the iterator - try (val reader = QueryResultArrowStream.toArrowStreamReader(chunkIterator)) { + // Create ArrowStreamReader from the iterator. + // Close order matters: the reader must close before the allocator because + // try-with-resources closes in reverse declaration order, and closing the allocator + // while the reader still holds buffers trips the leak detector. + val arrowStream = QueryResultArrowStream.toArrowStreamReader(chunkIterator); + try (val allocator = arrowStream.getAllocator(); + val reader = arrowStream.getReader()) { int rowCount = 0; // Count all rows in the arrow stream diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/data/ArrowToHyperTypeMapperTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/data/ArrowToHyperTypeMapperTest.java new file mode 100644 index 00000000..f6ec6200 --- /dev/null +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/data/ArrowToHyperTypeMapperTest.java @@ -0,0 +1,67 @@ +/** + * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the + * Apache 2.0 license. 
See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt + */ +package com.salesforce.datacloud.jdbc.protocol.data; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Collections; +import lombok.val; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.junit.jupiter.api.Test; + +/** + * Pin the {@link ArrowToHyperTypeMapper#toColumnMetadata(Field)} contract around the + * {@link HyperTypeToArrow#JDBC_TYPE_NAME_METADATA_KEY} field-metadata override. + * + *

Two paths must work: + *

    + *
  • An Arrow field that does stamp the override (the metadata path) returns a + * {@code ColumnMetadata} whose {@code typeName} matches the override exactly. + *
  • An Arrow field that does not stamp the override (every real-Hyper query stream) + * returns a {@code ColumnMetadata} whose {@code typeName} is {@code null}, so the JDBC + * layer falls back to the type-derived default. The fallback is implicit in the rest of + * the test suite — every functional test against local Hyper goes through this code + * path — but no assertion pinned it. This test does. + *
+ */ +class ArrowToHyperTypeMapperTest { + + @Test + void typeNameOverrideIsPickedUpWhenStamped() { + val metadata = Collections.singletonMap(HyperTypeToArrow.JDBC_TYPE_NAME_METADATA_KEY, "TEXT"); + val field = new Field("c", new FieldType(true, new ArrowType.Utf8(), null, metadata), null); + + val column = ArrowToHyperTypeMapper.toColumnMetadata(field); + + assertThat(column.getName()).isEqualTo("c"); + assertThat(column.getType()).isEqualTo(HyperType.varcharUnlimited(true)); + assertThat(column.getTypeName()).isEqualTo("TEXT"); + } + + @Test + void typeNameOverrideIsNullWhenAbsent() { + // Mirrors what a real Hyper Arrow stream looks like: no datacloud-jdbc:type_name key. + val field = new Field("c", new FieldType(true, new ArrowType.Utf8(), null), null); + + val column = ArrowToHyperTypeMapper.toColumnMetadata(field); + + assertThat(column.getName()).isEqualTo("c"); + assertThat(column.getType()).isEqualTo(HyperType.varcharUnlimited(true)); + // Null override means the JDBC layer falls back to HyperType-derived "VARCHAR". + assertThat(column.getTypeName()).isNull(); + } + + @Test + void typeNameOverrideIsNullWhenMetadataIsEmptyButPresent() { + val field = + new Field("c", new FieldType(true, new ArrowType.Int(32, true), null, Collections.emptyMap()), null); + + val column = ArrowToHyperTypeMapper.toColumnMetadata(field); + + assertThat(column.getTypeName()).isNull(); + } +} diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/data/IntegerVectorSetterRangeCheckTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/data/IntegerVectorSetterRangeCheckTest.java new file mode 100644 index 00000000..ce779e98 --- /dev/null +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/data/IntegerVectorSetterRangeCheckTest.java @@ -0,0 +1,114 @@ +/** + * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the + * Apache 2.0 license. 
See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt + */ +package com.salesforce.datacloud.jdbc.protocol.data; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import lombok.val; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.SmallIntVector; +import org.apache.arrow.vector.TinyIntVector; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +/** + * Pin the range-check behavior on the integer-family vector setters: every narrowing setter + * (TinyInt/SmallInt/Int) refuses out-of-range Number inputs rather than silently truncating. + * BigInt accepts the full long range. + * + *

Both code paths (parameter binding via DataCloudPreparedStatement.setObject and metadata + * row population via MetadataResultSets) reach these same setters, so strict checks here mean + * strict checks on both paths. + */ +class IntegerVectorSetterRangeCheckTest { + + private RootAllocator allocator; + + @BeforeEach + void setUp() { + allocator = new RootAllocator(Long.MAX_VALUE); + } + + @AfterEach + void tearDown() { + allocator.close(); + } + + @Test + void intVectorSetterAcceptsValuesInRange() { + try (val vector = new IntVector("col", allocator)) { + vector.allocateNew(3); + val setter = new IntVectorSetter(); + setter.setValueInternal(vector, 0, 0); + setter.setValueInternal(vector, 1, Integer.MAX_VALUE); + setter.setValueInternal(vector, 2, Long.valueOf(Integer.MIN_VALUE)); + vector.setValueCount(3); + assertThat(vector.get(0)).isEqualTo(0); + assertThat(vector.get(1)).isEqualTo(Integer.MAX_VALUE); + assertThat(vector.get(2)).isEqualTo(Integer.MIN_VALUE); + } + } + + @Test + void intVectorSetterRejectsLongAboveRange() { + try (val vector = new IntVector("col", allocator)) { + vector.allocateNew(1); + val setter = new IntVectorSetter(); + assertThatThrownBy(() -> setter.setValueInternal(vector, 0, Long.MAX_VALUE)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("out of range for INT32"); + } + } + + @Test + void intVectorSetterRejectsLongBelowRange() { + try (val vector = new IntVector("col", allocator)) { + vector.allocateNew(1); + val setter = new IntVectorSetter(); + assertThatThrownBy(() -> setter.setValueInternal(vector, 0, Long.MIN_VALUE)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("out of range for INT32"); + } + } + + @Test + void smallIntVectorSetterRejectsValueAboveRange() { + try (val vector = new SmallIntVector("col", allocator)) { + vector.allocateNew(1); + val setter = new SmallIntVectorSetter(); + assertThatThrownBy(() -> setter.setValueInternal(vector, 0, (long) Short.MAX_VALUE + 1)) + 
.isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("out of range for INT16"); + } + } + + @Test + void tinyIntVectorSetterRejectsValueAboveRange() { + try (val vector = new TinyIntVector("col", allocator)) { + vector.allocateNew(1); + val setter = new TinyIntVectorSetter(); + assertThatThrownBy(() -> setter.setValueInternal(vector, 0, (long) Byte.MAX_VALUE + 1)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("out of range for INT8"); + } + } + + @Test + void bigIntVectorSetterAcceptsFullLongRange() { + try (val vector = new BigIntVector("col", allocator)) { + vector.allocateNew(2); + val setter = new BigIntVectorSetter(); + setter.setValueInternal(vector, 0, Long.MAX_VALUE); + setter.setValueInternal(vector, 1, Long.MIN_VALUE); + vector.setValueCount(2); + assertThat(vector.get(0)).isEqualTo(Long.MAX_VALUE); + assertThat(vector.get(1)).isEqualTo(Long.MIN_VALUE); + } + } +} diff --git a/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/data/VarCharVectorSetterStrictTypeTest.java b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/data/VarCharVectorSetterStrictTypeTest.java new file mode 100644 index 00000000..f312641f --- /dev/null +++ b/jdbc-core/src/test/java/com/salesforce/datacloud/jdbc/protocol/data/VarCharVectorSetterStrictTypeTest.java @@ -0,0 +1,81 @@ +/** + * This file is part of https://github.com/forcedotcom/datacloud-jdbc which is released under the + * Apache 2.0 license. 
See https://github.com/forcedotcom/datacloud-jdbc/blob/main/LICENSE.txt + */ +package com.salesforce.datacloud.jdbc.protocol.data; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.nio.charset.StandardCharsets; +import lombok.val; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.VarCharVector; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +/** + * Pin the strict-String contract on VarCharVectorSetter: non-String / non-null payloads are + * rejected by the BaseVectorSetter type guard rather than silently coerced via toString. Without + * this test, a future widening of the generic from String back to Object would slip past CI and + * re-introduce the typeInfoRows Boolean-as-VARCHAR regression that motivated the strict typing. + */ +class VarCharVectorSetterStrictTypeTest { + + private RootAllocator allocator; + + @BeforeEach + void setUp() { + allocator = new RootAllocator(Long.MAX_VALUE); + } + + @AfterEach + void tearDown() { + allocator.close(); + } + + @Test + void varCharSetterAcceptsString() { + try (val vector = new VarCharVector("col", allocator)) { + vector.allocateNew(1); + val setter = new VarCharVectorSetter(); + setter.setValue(vector, 0, "hello"); + vector.setValueCount(1); + assertThat(new String(vector.get(0), StandardCharsets.UTF_8)).isEqualTo("hello"); + } + } + + @Test + void varCharSetterRejectsBoolean() { + try (val vector = new VarCharVector("col", allocator)) { + vector.allocateNew(1); + val setter = new VarCharVectorSetter(); + assertThatThrownBy(() -> setter.setValue(vector, 0, Boolean.TRUE)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("must be of type String"); + } + } + + @Test + void varCharSetterRejectsByteArray() { + try (val vector = new VarCharVector("col", allocator)) { + vector.allocateNew(1); + val setter = 
new VarCharVectorSetter(); + assertThatThrownBy(() -> setter.setValue(vector, 0, new byte[] {1, 2, 3})) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("must be of type String"); + } + } + + @Test + void varCharSetterRejectsNumber() { + try (val vector = new VarCharVector("col", allocator)) { + vector.allocateNew(1); + val setter = new VarCharVectorSetter(); + assertThatThrownBy(() -> setter.setValue(vector, 0, 42)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("must be of type String"); + } + } +}