diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 56aad787b544..9d2c9dca0bbc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -420,81 +420,16 @@ jobs: touch gib-impacted.log cat < .github/test-matrix.yaml include: - - modules: - - client/trino-jdbc - - core/trino-spi - - core/trino-web-ui - - modules: - - plugin/trino-base-jdbc - - plugin/trino-faker - - plugin/trino-geospatial - - plugin/trino-memory - - plugin/trino-openlineage - - plugin/trino-thrift - - modules: - - lib/trino-orc - - lib/trino-parquet - - modules: - - lib/trino-filesystem - - lib/trino-filesystem-azure - - lib/trino-filesystem-alluxio - - lib/trino-filesystem-cache-alluxio - - lib/trino-filesystem-gcs - - lib/trino-filesystem-manager - - lib/trino-filesystem-s3 - - lib/trino-hdfs - - lib/trino-hive-formats - - { modules: core/trino-main } - - { modules: lib/trino-filesystem-azure, profile: cloud-tests } - - { modules: lib/trino-filesystem-gcs, profile: cloud-tests } - - { modules: lib/trino-filesystem-s3, profile: cloud-tests } - - { modules: lib/trino-hdfs, profile: cloud-tests } - - { modules: plugin/trino-bigquery } - - { modules: plugin/trino-bigquery, profile: cloud-tests-2 } - - { modules: plugin/trino-cassandra } - - { modules: plugin/trino-clickhouse } - - { modules: plugin/trino-delta-lake } - - { modules: plugin/trino-delta-lake, profile: cloud-tests } - - { modules: plugin/trino-delta-lake, profile: fte-tests } - - { modules: plugin/trino-druid } - - { modules: plugin/trino-duckdb } - - { modules: plugin/trino-elasticsearch } - - { modules: plugin/trino-exasol } - - { modules: plugin/trino-google-sheets } - - { modules: plugin/trino-hive } - - { modules: plugin/trino-hive, profile: fte-tests } - - { modules: plugin/trino-hive, profile: test-parquet } - - { modules: plugin/trino-hudi } - - { modules: plugin/trino-iceberg } - - { modules: plugin/trino-iceberg, profile: cloud-tests } - - { modules: plugin/trino-iceberg, profile: fte-tests } - 
- { modules: plugin/trino-iceberg, profile: minio-and-avro } - - { modules: plugin/trino-ignite } - - { modules: plugin/trino-kafka } - - { modules: plugin/trino-lakehouse } - - { modules: plugin/trino-mariadb } - - { modules: plugin/trino-mongodb } - - { modules: plugin/trino-mysql } - - { modules: plugin/trino-openlineage } - - { modules: plugin/trino-opensearch } - { modules: plugin/trino-oracle } - - { modules: plugin/trino-pinot } - - { modules: plugin/trino-postgresql } - - { modules: plugin/trino-redis } - - { modules: plugin/trino-redshift } - - { modules: plugin/trino-redshift, profile: cloud-tests } - - { modules: plugin/trino-redshift, profile: fte-tests } - - { modules: plugin/trino-resource-group-managers } - - { modules: plugin/trino-singlestore } - - { modules: plugin/trino-snowflake } - - { modules: plugin/trino-snowflake, profile: cloud-tests } - - { modules: plugin/trino-sqlserver } - - { modules: plugin/trino-vertica } - - { modules: testing/trino-faulttolerant-tests, profile: default } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-hive } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-iceberg } - - { modules: testing/trino-tests } + - { modules: plugin/trino-oracle } + - { modules: plugin/trino-oracle } + - { modules: plugin/trino-oracle } + - { modules: plugin/trino-oracle } + - { modules: plugin/trino-oracle } + - { modules: plugin/trino-oracle } + - { modules: plugin/trino-oracle } + - { modules: plugin/trino-oracle } + - { modules: plugin/trino-oracle } EOF ./.github/bin/build-matrix-from-impacted.py -v -i gib-impacted.log -m .github/test-matrix.yaml -o matrix.json echo "Matrix: $(jq '.' 
matrix.json)" @@ -806,289 +741,3 @@ jobs: check_name: ${{ github.job }} with secrets conclusion: ${{ job.status }} github_token: ${{ secrets.GITHUB_TOKEN }} - - build-pt: - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - product-tests-changed: ${{ steps.filter.outputs.product-tests }} - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - timeout-minutes: 10 - with: - cache: restore - cleanup-node: true - - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 - id: filter - with: - filters: | - product-tests: - - 'testing/trino-product-tests*/**' - - 'testing/trino-testing-services/**' - # run all tests when there are any changes in the trino-server Maven module - # because it doesn't define it's Trino dependencies and - # it relies on the Provisio plugin to find the right artifacts - - 'core/trino-server/**' - - '.github/**' - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $MAVEN clean install ${MAVEN_FAST_INSTALL} -pl '!:trino-docs' - - name: Map impacted plugins to features - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - # build a list of impacted modules, ignoring modules that cannot affect either product tests or Trino - $MAVEN validate ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -pl '!:trino-docs,!:trino-tests,!:trino-faulttolerant-tests' - # GIB doesn't run on master, so make sure the file always exist - touch gib-impacted.log - testing/trino-plugin-reader/target/trino-plugin-reader-*-executable.jar -i gib-impacted.log -p 
core/trino-server/target/trino-server-*-hardlinks/plugin > impacted-features.log - echo "Impacted plugin features:" - cat impacted-features.log - - name: Product tests artifact - uses: actions/upload-artifact@v4 - with: - name: product tests and server tarball - path: | - core/trino-server/target/*.tar.gz - impacted-features.log - testing/trino-product-tests-launcher/target/*.jar - testing/trino-product-tests/target/*-executable.jar - client/trino-cli/target/*-executable.jar - retention-days: 1 - - id: prepare-matrix-template - run: | - cat < .github/test-pt-matrix.yaml - config: - - default - suite: - - suite-1 - - suite-2 - - suite-3 - # suite-4 does not exist - - suite-5 - - suite-6-non-generic - - suite-7-non-generic - - suite-hive-transactional - - suite-azure - - suite-delta-lake-databricks113 - - suite-delta-lake-databricks122 - - suite-delta-lake-databricks133 - - suite-delta-lake-databricks143 - - suite-delta-lake-databricks154 - - suite-delta-lake-databricks164 - - suite-ranger - - suite-gcs - - suite-clients - - suite-functions - - suite-tpch - - suite-tpcds - - suite-storage-formats-detailed - - suite-parquet - - suite-oauth2 - - suite-ldap - - suite-compatibility - - suite-all-connectors-smoke - - suite-delta-lake-oss - - suite-kafka - - suite-cassandra - - suite-clickhouse - - suite-mysql - - suite-iceberg - - suite-snowflake - - suite-hudi - - suite-ignite - exclude: - - suite: suite-azure - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || - vars.AZURE_ABFS_HIERARCHICAL_CONTAINER != '' || - vars.AZURE_ABFS_HIERARCHICAL_ACCOUNT != '' || - secrets.AZURE_ABFS_HIERARCHICAL_ACCESS_KEY != '' }} - - - suite: suite-gcs - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.GCP_CREDENTIALS_KEY != '' }} - - - suite: suite-delta-lake-databricks113 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks122 - ignore 
exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks133 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks143 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks154 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks164 - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-snowflake - ignore exclusion if: >- - ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.SNOWFLAKE_PASSWORD != '' }} - - ignore exclusion if: - # Do not use this property outside of the matrix configuration. - # - # This is added to all matrix entries so they may be conditionally - # excluded by adding them to the excludes list with a GHA expression - # for this property. - # - If the expression evaluates to true, it will never match the a - # actual value of the property, and will therefore not be excluded. - # - If the expression evaluates to false, it will match the actual - # value of the property, and the exclusion will apply normally. - - "false" - include: - # this suite is designed specifically for apache-hive3. TODO remove the suite once we can run all regular tests on apache-hive3. 
- - config: apache-hive3 - suite: suite-hms-only - EOF - - name: Build PT matrix (all) - if: | - github.event_name != 'pull_request' || - steps.filter.outputs.product-tests == 'true' || - contains(github.event.pull_request.labels.*.name, 'tests:all') || - contains(github.event.pull_request.labels.*.name, 'tests:all-product') - run: | - # converts entire YAML file into JSON - no filtering since we want all PTs to run - ./.github/bin/build-pt-matrix-from-impacted-connectors.py -v -m .github/test-pt-matrix.yaml -o matrix.json - - name: Build PT matrix (impacted-features) - if: | - github.event_name == 'pull_request' && - steps.filter.outputs.product-tests == 'false' && - !contains(github.event.pull_request.labels.*.name, 'tests:all') && - !contains(github.event.pull_request.labels.*.name, 'product-tests:all') - # all these envs are required to be set by some product test environments - env: - ABFS_CONTAINER: "" - ABFS_ACCOUNT: "" - ABFS_ACCESS_KEY: "" - S3_BUCKET: "" - AWS_REGION: "" - TRINO_AWS_ACCESS_KEY_ID: "" - TRINO_AWS_SECRET_ACCESS_KEY: "" - DATABRICKS_113_JDBC_URL: "" - DATABRICKS_122_JDBC_URL: "" - DATABRICKS_133_JDBC_URL: "" - DATABRICKS_143_JDBC_URL: "" - DATABRICKS_154_JDBC_URL: "" - DATABRICKS_164_JDBC_URL: "" - DATABRICKS_LOGIN: "" - DATABRICKS_TOKEN: "" - GCP_CREDENTIALS_KEY: "" - GCP_STORAGE_BUCKET: "" - SNOWFLAKE_URL: "" - SNOWFLAKE_USER: "" - SNOWFLAKE_PASSWORD: "" - SNOWFLAKE_DATABASE: "" - SNOWFLAKE_ROLE: "" - SNOWFLAKE_WAREHOUSE: "" - TESTCONTAINERS_NEVER_PULL: true - run: | - # converts filtered YAML file into JSON - ./.github/bin/build-pt-matrix-from-impacted-connectors.py -v -m .github/test-pt-matrix.yaml -i impacted-features.log -o matrix.json - - id: set-matrix - run: | - echo "Matrix: $(jq '.' 
matrix.json)" - echo "matrix=$(cat matrix.json)" >> $GITHUB_OUTPUT - - pt: - runs-on: 'ubuntu-latest' - # explicitly define the name to avoid adding the value of the `ignore exclusion if` matrix item - name: pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }}) - if: needs.build-pt.outputs.matrix != '{}' - strategy: - fail-fast: false - matrix: ${{ fromJson(needs.build-pt.outputs.matrix) }} - # PT Launcher's timeout defaults to 2h, add some margin - timeout-minutes: 130 - needs: build-pt - steps: - - uses: actions/checkout@v5 - with: - fetch-depth: 0 # checkout all commits, as the build result depends on `git describe` equivalent - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - timeout-minutes: 10 - with: - # The job doesn't build anything, so the ~/.m2/repository cache isn't useful - cache: 'false' - - name: Product tests artifact - uses: actions/download-artifact@v5 - with: - name: product tests and server tarball - - name: Fix artifact permissions - run: | - find . 
-type f -name \*-executable.jar -exec chmod 0777 {} \; - - name: Enable impact analysis - if: | - needs.build-pt.outputs.product-tests-changed == 'false' && - github.event_name == 'pull_request' && - !contains(github.event.pull_request.labels.*.name, 'tests:all') && - !contains(github.event.pull_request.labels.*.name, 'tests:all-product') - run: echo "PTL_OPTS=--impacted-features impacted-features.log" >> $GITHUB_ENV - - name: Product Tests - id: tests - env: - ABFS_CONTAINER: ${{ vars.AZURE_ABFS_HIERARCHICAL_CONTAINER }} - ABFS_ACCOUNT: ${{ vars.AZURE_ABFS_HIERARCHICAL_ACCOUNT }} - ABFS_ACCESS_KEY: ${{ secrets.AZURE_ABFS_HIERARCHICAL_ACCESS_KEY }} - S3_BUCKET: ${{ vars.TRINO_S3_BUCKET }} - AWS_REGION: ${{ vars.TRINO_AWS_REGION }} - TRINO_AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }} - TRINO_AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }} - DATABRICKS_113_JDBC_URL: ${{ vars.DATABRICKS_113_JDBC_URL }} - DATABRICKS_122_JDBC_URL: ${{ vars.DATABRICKS_122_JDBC_URL }} - DATABRICKS_133_JDBC_URL: ${{ vars.DATABRICKS_133_JDBC_URL }} - DATABRICKS_143_JDBC_URL: ${{ vars.DATABRICKS_143_JDBC_URL }} - DATABRICKS_154_JDBC_URL: ${{ vars.DATABRICKS_154_JDBC_URL }} - DATABRICKS_164_JDBC_URL: ${{ vars.DATABRICKS_164_JDBC_URL }} - DATABRICKS_LOGIN: token - DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} - GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }} - GCP_STORAGE_BUCKET: ${{ vars.GCP_STORAGE_BUCKET }} - SNOWFLAKE_URL: ${{ vars.SNOWFLAKE_URL }} - SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }} - SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }} - SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }} - SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }} - SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }} - run: | - exec testing/trino-product-tests-launcher/target/trino-product-tests-launcher-*-executable.jar suite run \ - --suite ${{ matrix.suite }} \ - --config config-${{ matrix.config }} \ - ${PTL_OPTS:-} \ - --bind=off --logs-dir logs/ --timeout 2h - - 
name: Upload test results - uses: ./.github/actions/process-test-results - if: always() - with: - artifact-name: pt (${{ matrix.config }}, ${{ matrix.suite }}, ${{ matrix.jdk }}) - has-failed-tests: ${{ steps.tests.outcome == 'failure' }} - upload-heap-dump: ${{ env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }} - - name: Update PR check - uses: ./.github/actions/update-check - if: >- - failure() && - github.event_name == 'repository_dispatch' && - github.event.client_payload.slash_command.args.named.sha != '' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha - with: - pull_request_number: ${{ github.event.client_payload.pull_request.number }} - check_name: ${{ github.job }} with secrets - conclusion: ${{ job.status }} - github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/client/trino-jdbc/pom.xml b/client/trino-jdbc/pom.xml index c15efe44a4ea..78ad16ed913d 100644 --- a/client/trino-jdbc/pom.xml +++ b/client/trino-jdbc/pom.xml @@ -345,7 +345,7 @@ org.testcontainers - oracle-xe + oracle-free test diff --git a/client/trino-jdbc/src/test/java/io/trino/jdbc/TestJdbcVendorCompatibility.java b/client/trino-jdbc/src/test/java/io/trino/jdbc/TestJdbcVendorCompatibility.java index 395cc792d00d..6f20fd67f45a 100644 --- a/client/trino-jdbc/src/test/java/io/trino/jdbc/TestJdbcVendorCompatibility.java +++ b/client/trino-jdbc/src/test/java/io/trino/jdbc/TestJdbcVendorCompatibility.java @@ -24,8 +24,8 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.parallel.Execution; -import org.testcontainers.containers.OracleContainer; import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.oracle.OracleContainer; import java.io.Closeable; import java.sql.Connection; @@ -539,7 +539,7 @@ private static class OracleReferenceDriver OracleReferenceDriver() 
{ - oracleServer = new OracleContainer("gvenzl/oracle-xe:11.2.0.2-full") + oracleServer = new OracleContainer("gvenzl/oracle-free:23.9-slim") .usingSid(); oracleServer.start(); } diff --git a/docs/src/main/sphinx/connector/oracle.md b/docs/src/main/sphinx/connector/oracle.md index 034defa9ff1d..88ae659d2ff4 100644 --- a/docs/src/main/sphinx/connector/oracle.md +++ b/docs/src/main/sphinx/connector/oracle.md @@ -18,7 +18,7 @@ like Oracle and Hive, or different Oracle database instances. To connect to Oracle, you need: -- Oracle 19 or higher. +- Oracle 23 or higher. - Network access from the Trino coordinator and workers to Oracle. Port 1521 is the default port. diff --git a/plugin/trino-oracle/pom.xml b/plugin/trino-oracle/pom.xml index cf5abbcc7896..bb39f2e307d5 100644 --- a/plugin/trino-oracle/pom.xml +++ b/plugin/trino-oracle/pom.xml @@ -251,7 +251,7 @@ org.testcontainers - oracle-xe + oracle-free test diff --git a/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/BaseOracleConnectorTest.java b/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/BaseOracleConnectorTest.java index 435a0fc2f07f..7a8f7d595466 100644 --- a/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/BaseOracleConnectorTest.java +++ b/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/BaseOracleConnectorTest.java @@ -137,7 +137,7 @@ public void testInformationSchemaFiltering() @Override protected boolean isColumnNameRejected(Exception exception, String columnName, boolean delimited) { - if (columnName.equals("a\"quote") && exception.getMessage().contains("ORA-03001: unimplemented feature")) { + if (columnName.equals("a\"quote") && exception.getMessage().contains("ORA-25716: The identifier contains a double quotation mark (\") character")) { return true; } @@ -375,6 +375,14 @@ public void testTooLargeDomainCompactionThreshold() "SELECT * from nation", "Domain compaction threshold \\(10000\\) cannot exceed 1000"); } + @Test + @Override // Override because Oracle allows 
SELECT query in execute procedure + public void testExecuteProcedureWithInvalidQuery() + { + assertUpdate("CALL system.execute('SELECT 1')"); + assertQueryFails("CALL system.execute('invalid')", "(?s)Failed to execute query.*"); + } + @Test @Override public void testNativeQuerySimple() @@ -444,37 +452,37 @@ protected void verifyConcurrentAddColumnFailurePermissible(Exception e) @Override protected OptionalInt maxSchemaNameLength() { - return OptionalInt.of(30); + return OptionalInt.of(128); } @Override protected void verifySchemaNameLengthFailurePermissible(Throwable e) { - assertThat(e).hasMessageContaining("ORA-00972: identifier is too long"); + assertThat(e).hasMessageContaining("ORA-00972"); } @Override protected OptionalInt maxTableNameLength() { - return OptionalInt.of(30); + return OptionalInt.of(128); } @Override protected void verifyTableNameLengthFailurePermissible(Throwable e) { - assertThat(e).hasMessageContaining("ORA-00972: identifier is too long"); + assertThat(e).hasMessageContaining("ORA-00972"); } @Override protected OptionalInt maxColumnNameLength() { - return OptionalInt.of(30); + return OptionalInt.of(128); } @Override protected void verifyColumnNameLengthFailurePermissible(Throwable e) { - assertThat(e).hasMessageContaining("ORA-00972: identifier is too long"); + assertThat(e).hasMessageContaining("ORA-00972"); } @Override diff --git a/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/TestingOracleServer.java b/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/TestingOracleServer.java index 8b089c5ce314..b4f63a91b4fe 100644 --- a/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/TestingOracleServer.java +++ b/plugin/trino-oracle/src/test/java/io/trino/plugin/oracle/TestingOracleServer.java @@ -24,7 +24,7 @@ import io.trino.plugin.jdbc.credential.StaticCredentialProvider; import io.trino.plugin.jdbc.jmx.StatisticsAwareConnectionFactory; import oracle.jdbc.OracleDriver; -import org.testcontainers.containers.OracleContainer; 
+import org.testcontainers.oracle.OracleContainer; import org.testcontainers.utility.MountableFile; import java.io.Closeable; @@ -73,11 +73,11 @@ public TestingOracleServer() private void createContainer() { - OracleContainer container = new OracleContainer("gvenzl/oracle-xe:11.2.0.2-full") + OracleContainer container = new OracleContainer("gvenzl/oracle-free:23.9-slim") .withCopyFileToContainer(MountableFile.forClasspathResource("init.sql"), "/container-entrypoint-initdb.d/01-init.sql") .withCopyFileToContainer(MountableFile.forClasspathResource("restart.sh"), "/container-entrypoint-initdb.d/02-restart.sh") .withCopyFileToContainer(MountableFile.forHostPath(createConfigureScript()), "/container-entrypoint-initdb.d/03-create-users.sql") - .usingSid(); + .withStartupTimeoutSeconds(180); try { this.cleanup = startOrReuse(container); this.container = container; @@ -95,6 +95,7 @@ private Path createConfigureScript() File tempFile = File.createTempFile("init-", ".sql"); Files.write(Joiner.on("\n").join( + "ALTER SESSION SET CONTAINER=FREEPDB1;", format("CREATE TABLESPACE %s DATAFILE 'test_db.dat' SIZE 100M ONLINE;", TEST_TABLESPACE), format("CREATE USER %s IDENTIFIED BY %s DEFAULT TABLESPACE %s;", TEST_USER, TEST_PASS, TEST_TABLESPACE), format("GRANT UNLIMITED TABLESPACE TO %s;", TEST_USER), diff --git a/plugin/trino-resource-group-managers/pom.xml b/plugin/trino-resource-group-managers/pom.xml index b0fdc009d6ae..87eddd070eff 100644 --- a/plugin/trino-resource-group-managers/pom.xml +++ b/plugin/trino-resource-group-managers/pom.xml @@ -256,7 +256,7 @@ org.testcontainers - oracle-xe + oracle-free test diff --git a/plugin/trino-resource-group-managers/src/test/java/io/trino/plugin/resourcegroups/db/TestDbResourceGroupsOracleFlywayMigration.java b/plugin/trino-resource-group-managers/src/test/java/io/trino/plugin/resourcegroups/db/TestDbResourceGroupsOracleFlywayMigration.java index de706e7cde9d..e0ae3dd874f3 100644 ---
a/plugin/trino-resource-group-managers/src/test/java/io/trino/plugin/resourcegroups/db/TestDbResourceGroupsOracleFlywayMigration.java +++ b/plugin/trino-resource-group-managers/src/test/java/io/trino/plugin/resourcegroups/db/TestDbResourceGroupsOracleFlywayMigration.java @@ -15,7 +15,7 @@ import org.jdbi.v3.core.Handle; import org.testcontainers.containers.JdbcDatabaseContainer; -import org.testcontainers.containers.OracleContainer; +import org.testcontainers.oracle.OracleContainer; import java.sql.DatabaseMetaData; import java.sql.ResultSet; @@ -28,7 +28,7 @@ public class TestDbResourceGroupsOracleFlywayMigration @Override protected final JdbcDatabaseContainer startContainer() { - JdbcDatabaseContainer container = new OracleContainer("gvenzl/oracle-xe:18.4.0-slim") + JdbcDatabaseContainer container = new OracleContainer("gvenzl/oracle-free:23.9-slim") .withPassword("trino") .withEnv("ORACLE_PASSWORD", "trino"); container.start();