diff --git a/.github/config/labeler-config.yml b/.github/config/labeler-config.yml
index 4f44778847d5..7acd4f68b68b 100644
--- a/.github/config/labeler-config.yml
+++ b/.github/config/labeler-config.yml
@@ -131,10 +131,6 @@ sqlserver:
- changed-files:
- any-glob-to-any-file: 'plugin/trino-sqlserver/**'
-vertica:
- - changed-files:
- - any-glob-to-any-file: 'plugin/trino-vertica/**'
-
docs:
- changed-files:
- any-glob-to-any-file: 'docs/**'
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a1ba6e09d4b5..7e3e78424b49 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -375,7 +375,6 @@ jobs:
!:trino-test-jdbc-compatibility-old-server,
!:trino-tests,
!:trino-thrift,
- !:trino-vertica,
!:trino-web-ui'
- name: Upload test results
uses: ./.github/actions/process-test-results
@@ -489,7 +488,6 @@ jobs:
- { modules: plugin/trino-snowflake }
- { modules: plugin/trino-snowflake, profile: cloud-tests }
- { modules: plugin/trino-sqlserver }
- - { modules: plugin/trino-vertica }
- { modules: testing/trino-faulttolerant-tests, profile: default }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-hive }
diff --git a/core/trino-server/src/main/provisio/trino.xml b/core/trino-server/src/main/provisio/trino.xml
index 97a366b39290..b1f0b04b14bd 100644
--- a/core/trino-server/src/main/provisio/trino.xml
+++ b/core/trino-server/src/main/provisio/trino.xml
@@ -307,10 +307,4 @@
-    <artifactSet to="plugin/vertica">
-        <artifact id="${project.groupId}:trino-vertica:zip:${project.version}">
-            <unpack />
-        </artifact>
-    </artifactSet>
-
diff --git a/docs/release-template.md b/docs/release-template.md
index 85bb27954de3..aa341f4f3869 100644
--- a/docs/release-template.md
+++ b/docs/release-template.md
@@ -80,6 +80,4 @@
## TPC-DS connector
-## Vertica connector
-
## SPI
diff --git a/docs/src/main/sphinx/connector.md b/docs/src/main/sphinx/connector.md
index a954b30cf059..6805fdca440a 100644
--- a/docs/src/main/sphinx/connector.md
+++ b/docs/src/main/sphinx/connector.md
@@ -45,7 +45,6 @@ System
Thrift
TPC-DS
TPC-H
-Vertica
```
```{toctree}
diff --git a/docs/src/main/sphinx/connector/vertica.md b/docs/src/main/sphinx/connector/vertica.md
deleted file mode 100644
index 82dd42508df0..000000000000
--- a/docs/src/main/sphinx/connector/vertica.md
+++ /dev/null
@@ -1,236 +0,0 @@
----
-myst:
- substitutions:
- default_domain_compaction_threshold: '`256`'
----
-
-# Vertica connector
-
-```{raw} html
-<img src="../_static/img/vertica.png">
-```
-
-The Vertica connector allows querying a [Vertica database, also known as OpenText
-Analytics Database](https://www.opentext.com/products/analytics-database), as an
-external data source.
-
-## Requirements
-
-To connect to Vertica, you need:
-
-- Vertica 11.x or higher.
-- Network access from the coordinator and workers to the Vertica server.
- Port 5433 is the default port.
-
-## Configuration
-
-Create a catalog properties file in `etc/catalog` named `example.properties` to
-access the configured Vertica database in the `example` catalog. Replace
-`example` with your database name or some other descriptive name of the catalog.
-Configure the connector by setting the `connector.name` property to `vertica`,
-and adjust the connection properties as appropriate for your setup.
-
-```properties
-connector.name=vertica
-connection-url=jdbc:vertica://example.net:5433/test_db
-connection-user=root
-connection-password=secret
-```
-
-The `connection-user` and `connection-password` are typically required and
-determine the user credentials for the connection, often a service user. You can
-use [secrets](/security/secrets) to avoid actual values in the catalog
-properties files.
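-
-For example, you can reference credentials stored in environment variables
-instead of literal values. The variable names here are illustrative:
-
-```properties
-connection-user=${ENV:VERTICA_USER}
-connection-password=${ENV:VERTICA_PASSWORD}
-```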
-
-```{include} jdbc-common-configurations.fragment
-```
-
-```{include} query-comment-format.fragment
-```
-
-```{include} jdbc-domain-compaction-threshold.fragment
-```
-
-```{include} jdbc-case-insensitive-matching.fragment
-```
-
-## Type mapping
-
-Because Trino and Vertica each support types that the other does not, this
-connector [modifies some types](type-mapping-overview) when reading or writing
-data. Data types may not map the same way in both directions between Trino and
-the data source. Refer to the following sections for type mapping in each
-direction.
-
-### Vertica to Trino type mapping
-
-The connector maps Vertica types to the corresponding Trino types according to
-the following table:
-
-:::{list-table} Vertica to Trino type mapping
-:widths: 35, 25, 40
-:header-rows: 1
-
-* - Vertica type
- - Trino type
- - Notes
-* - `BOOLEAN`
- - `BOOLEAN`
- -
-* - `BIGINT`
- - `BIGINT`
- - Vertica treats TINYINT, SMALLINT, INTEGER, and BIGINT as synonyms for the
- same 64-bit BIGINT data type
-* - `DOUBLE PRECISION (FLOAT)`
- - `DOUBLE`
- - Vertica treats FLOAT and REAL as the same 64-bit IEEE FLOAT
-* - `DECIMAL(p, s)`
- - `DECIMAL(p, s)`
- -
-* - `CHAR, CHAR(n)`
- - `CHAR, CHAR(n)`
- -
-* - `VARCHAR`, `LONG VARCHAR`, `VARCHAR(n)`, `LONG VARCHAR(n)`
- - `VARCHAR(n)`
- -
-* - `VARBINARY`, `LONG VARBINARY`, `VARBINARY(n)`, `LONG VARBINARY(n)`
- - `VARBINARY(n)`
- -
-* - `DATE`
- - `DATE`
- -
-:::
-
-No other types are supported.
-
-Unsupported Vertica types can be converted to `VARCHAR` with the
-`vertica.unsupported_type_handling` session property. The default value for
-this property is `IGNORE`.
-
-```sql
-SET SESSION vertica.unsupported_type_handling='CONVERT_TO_VARCHAR';
-```
-
-### Trino to Vertica type mapping
-
-The connector maps Trino types to the corresponding Vertica types according to
-the following table:
-
-:::{list-table} Trino to Vertica type mapping
-:widths: 50, 50
-:header-rows: 1
-
-* - Trino type
- - Vertica type
-* - `BOOLEAN`
- - `BOOLEAN`
-* - `TINYINT`
- - `BIGINT`
-* - `SMALLINT`
- - `BIGINT`
-* - `INTEGER`
- - `BIGINT`
-* - `BIGINT`
- - `BIGINT`
-* - `REAL`
- - `DOUBLE PRECISION`
-* - `DOUBLE`
- - `DOUBLE PRECISION`
-* - `DECIMAL(p, s)`
- - `DECIMAL(p, s)`
-* - `CHAR`
- - `CHAR`
-* - `VARCHAR`
- - `VARCHAR`
-* - `VARBINARY`
- - `VARBINARY`
-* - `DATE`
- - `DATE`
-:::
-
-No other types are supported.
-
-```{include} jdbc-type-mapping.fragment
-```
-
-(vertica-sql-support)=
-## SQL support
-
-The connector provides read and write access to data and metadata in Vertica. In
-addition to the [globally available](sql-globally-available) and [read
-operation](sql-read-operations) statements, the connector supports the following
-features:
-
-- [](sql-data-management)
-- [](/sql/create-table)
-- [](/sql/create-table-as)
-- [](/sql/drop-table)
-- [](/sql/alter-table) excluding `DROP COLUMN`, see also [](vertica-alter-table)
-- [](/sql/create-schema)
-- [](/sql/drop-schema)
-- [](vertica-table-functions)
-
-(vertica-alter-table)=
-```{include} alter-table-limitation.fragment
-```
-
-(vertica-table-functions)=
-## Table functions
-
-The connector provides specific [table functions](/functions/table) to
-access Vertica.
-
-(vertica-query-function)=
-### `query(VARCHAR) -> table`
-
-The `query` function allows you to query the underlying database directly. It
-requires syntax native to the data source, because the full query is pushed down
-and processed in the data source. This can be useful for accessing native
-features or for improving query performance in situations where running a query
-natively may be faster.
-
-The `query` table function is available in the `system` schema of any
-catalog that uses the Vertica connector, such as `example`. The
-following example passes `myQuery` to the data source. `myQuery` has to be a
-valid query for the data source, and is required to return a table as a result:
-
-```sql
-SELECT
- *
-FROM
- TABLE(
- example.system.query(
- query => 'myQuery'
- )
- );
-```
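-
-As a concrete illustration, the following invocation runs a native Vertica
-query against a hypothetical `public.orders` table:
-
-```sql
-SELECT
-  *
-FROM
-  TABLE(
-    example.system.query(
-      query => 'SELECT orderkey, orderstatus FROM public.orders LIMIT 10'
-    )
-  );
-```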
-
-```{include} query-table-function-ordering.fragment
-```
-
-## Performance
-
-The connector includes a number of performance features, detailed in the
-following sections.
-
-### Pushdown
-
-The connector supports pushdown for a number of operations:
-
-- [](join-pushdown)
-- [](limit-pushdown)
-
-```{include} join-pushdown-enabled-false.fragment
-```
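-
-For example, assuming the standard JDBC connector session properties, you can
-enable join pushdown for the current session in the `example` catalog:
-
-```sql
-SET SESSION example.join_pushdown_enabled = true;
-```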
-
-### Table statistics
-
-The [cost-based optimizer](/optimizer/cost-based-optimizations) can use table
-statistics from the Vertica database to improve query performance.
-
-Support for table statistics is disabled by default. You can enable it with the
-catalog property `statistics.enabled` set to `true`. In addition, the
-`connection-user` configured in the catalog must have superuser permissions in
-Vertica to gather and populate statistics.
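-
-For example, add the following line to the catalog properties file:
-
-```properties
-statistics.enabled=true
-```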
-
-You can view statistics using [](/sql/show-stats).
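-
-For example, the following statement displays the collected statistics for a
-hypothetical `public.orders` table:
-
-```sql
-SHOW STATS FOR example.public.orders;
-```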
diff --git a/docs/src/main/sphinx/ext/download.py b/docs/src/main/sphinx/ext/download.py
index 2dce7a7c4ef2..66d1ccdafa5c 100644
--- a/docs/src/main/sphinx/ext/download.py
+++ b/docs/src/main/sphinx/ext/download.py
@@ -77,7 +77,6 @@
'thrift': ('trino-thrift', 'zip'),
'tpcds': ('trino-tpcds', 'zip'),
'tpch': ('trino-tpch', 'zip'),
- 'vertica': ('trino-vertica', 'zip'),
}
def filename(artifact, version, extension):
diff --git a/docs/src/main/sphinx/installation/plugins.md b/docs/src/main/sphinx/installation/plugins.md
index 6537ca6f41cf..7f0fe350d2b0 100644
--- a/docs/src/main/sphinx/installation/plugins.md
+++ b/docs/src/main/sphinx/installation/plugins.md
@@ -360,8 +360,4 @@ with the listed coordinates.
- [](/connector/tpch)
- [io.trino:trino-tpch](https://central.sonatype.com/search?q=io.trino%3Atrino-tpch)
- {download_gh}`tpch`
-* - vertica
- - [](/connector/vertica)
- - [io.trino:trino-vertica](https://central.sonatype.com/search?q=io.trino%3Atrino-vertica)
- - {download_gh}`vertica`
-:::
\ No newline at end of file
+:::
diff --git a/docs/src/main/sphinx/redirects.txt b/docs/src/main/sphinx/redirects.txt
index d9343e1c7b14..08d4b6435cfe 100644
--- a/docs/src/main/sphinx/redirects.txt
+++ b/docs/src/main/sphinx/redirects.txt
@@ -12,6 +12,7 @@ connector/accumulo.md connector/removed.md
connector/kinesis.md connector/removed.md
connector/kudu.md connector/removed.md
connector/phoenix.md connector/removed.md
+connector/vertica.md connector/removed.md
object-storage/legacy-azure.md object-storage/file-system-azure.md
object-storage/legacy-cos.md object-storage/file-system-s3.md
object-storage/legacy-gcs.md object-storage/file-system-gcs.md
diff --git a/docs/src/main/sphinx/release/release-464.md b/docs/src/main/sphinx/release/release-464.md
index 56093cfed154..c659bdf52d29 100644
--- a/docs/src/main/sphinx/release/release-464.md
+++ b/docs/src/main/sphinx/release/release-464.md
@@ -4,7 +4,7 @@
* {{breaking}} Require JDK 23 to run Trino, including updated [](jvm-config). ({issue}`21316`)
* Add the [](/connector/faker) for easy generation of data. ({issue}`23691`)
-* Add the [](/connector/vertica). ({issue}`23948`)
+* Add the Vertica connector. ({issue}`23948`)
* Rename the
`fault-tolerant-execution-eager-speculative-tasks-node_memory-overcommit`
configuration property to
@@ -56,4 +56,4 @@
## Phoenix connector
-* {{breaking}} Require JVM configuration to allow the Java security manager. ({issue}`24207`)
\ No newline at end of file
+* {{breaking}} Require JVM configuration to allow the Java security manager. ({issue}`24207`)
diff --git a/docs/src/main/sphinx/static/img/vertica.png b/docs/src/main/sphinx/static/img/vertica.png
deleted file mode 100644
index 9b935a6ba594..000000000000
Binary files a/docs/src/main/sphinx/static/img/vertica.png and /dev/null differ
diff --git a/plugin/trino-vertica/pom.xml b/plugin/trino-vertica/pom.xml
deleted file mode 100644
index 61243d0cf745..000000000000
--- a/plugin/trino-vertica/pom.xml
+++ /dev/null
@@ -1,269 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <parent>
-        <groupId>io.trino</groupId>
-        <artifactId>trino-root</artifactId>
-        <version>478-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-
-    <artifactId>trino-vertica</artifactId>
-    <packaging>trino-plugin</packaging>
-    <name>${project.artifactId}</name>
-    <description>Trino - Vertica Connector</description>
-
-    <dependencies>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.dataformat</groupId>
-            <artifactId>jackson-dataformat-xml</artifactId>
-            <version>${dep.jackson.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.woodstox</groupId>
-                    <artifactId>woodstox-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.google.inject</groupId>
-            <artifactId>guice</artifactId>
-            <classifier>classes</classifier>
-        </dependency>
-
-        <dependency>
-            <groupId>com.vertica.jdbc</groupId>
-            <artifactId>vertica-jdbc</artifactId>
-            <version>25.3.0-0</version>
-        </dependency>
-
-        <dependency>
-            <groupId>io.airlift</groupId>
-            <artifactId>configuration</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>io.airlift</groupId>
-            <artifactId>log</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-base-jdbc</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-plugin-toolkit</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.jdbi</groupId>
-            <artifactId>jdbi3-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-annotations</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.airlift</groupId>
-            <artifactId>slice</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.opentelemetry</groupId>
-            <artifactId>opentelemetry-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.opentelemetry</groupId>
-            <artifactId>opentelemetry-api-incubator</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.opentelemetry</groupId>
-            <artifactId>opentelemetry-context</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-spi</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.openjdk.jol</groupId>
-            <artifactId>jol-core</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.google.errorprone</groupId>
-            <artifactId>error_prone_annotations</artifactId>
-            <scope>runtime</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.airlift</groupId>
-            <artifactId>log-manager</artifactId>
-            <scope>runtime</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.jetbrains</groupId>
-            <artifactId>annotations</artifactId>
-            <scope>runtime</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.github.docker-java</groupId>
-            <artifactId>docker-java-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.h2database</groupId>
-            <artifactId>h2</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.airlift</groupId>
-            <artifactId>junit-extensions</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.airlift</groupId>
-            <artifactId>testing</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-base-jdbc</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-jdbc</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-jmx</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-main</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-main</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-memory</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-testing</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-testing-containers</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-testing-services</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino</groupId>
-            <artifactId>trino-tpch</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>io.trino.tpch</groupId>
-            <artifactId>tpch</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.assertj</groupId>
-            <artifactId>assertj-core</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.junit.jupiter</groupId>
-            <artifactId>junit-jupiter-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.junit.jupiter</groupId>
-            <artifactId>junit-jupiter-engine</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.junit.jupiter</groupId>
-            <artifactId>junit-jupiter-params</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.testcontainers</groupId>
-            <artifactId>jdbc</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.testcontainers</groupId>
-            <artifactId>testcontainers</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-</project>
diff --git a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaClient.java b/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaClient.java
deleted file mode 100644
index 2b160ba666d1..000000000000
--- a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaClient.java
+++ /dev/null
@@ -1,438 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import com.google.common.collect.ImmutableSet;
-import com.google.inject.Inject;
-import io.trino.plugin.base.expression.ConnectorExpressionRewriter;
-import io.trino.plugin.base.mapping.IdentifierMapping;
-import io.trino.plugin.jdbc.BaseJdbcClient;
-import io.trino.plugin.jdbc.BaseJdbcConfig;
-import io.trino.plugin.jdbc.ColumnMapping;
-import io.trino.plugin.jdbc.ConnectionFactory;
-import io.trino.plugin.jdbc.JdbcColumnHandle;
-import io.trino.plugin.jdbc.JdbcJoinCondition;
-import io.trino.plugin.jdbc.JdbcMetadata;
-import io.trino.plugin.jdbc.JdbcStatisticsConfig;
-import io.trino.plugin.jdbc.JdbcTableHandle;
-import io.trino.plugin.jdbc.JdbcTypeHandle;
-import io.trino.plugin.jdbc.LongWriteFunction;
-import io.trino.plugin.jdbc.PreparedQuery;
-import io.trino.plugin.jdbc.QueryBuilder;
-import io.trino.plugin.jdbc.WriteMapping;
-import io.trino.plugin.jdbc.expression.JdbcConnectorExpressionRewriterBuilder;
-import io.trino.plugin.jdbc.expression.ParameterizedExpression;
-import io.trino.plugin.jdbc.logging.RemoteQueryModifier;
-import io.trino.spi.TrinoException;
-import io.trino.spi.connector.ColumnHandle;
-import io.trino.spi.connector.ConnectorSession;
-import io.trino.spi.connector.SchemaTableName;
-import io.trino.spi.expression.ConnectorExpression;
-import io.trino.spi.statistics.TableStatistics;
-import io.trino.spi.type.CharType;
-import io.trino.spi.type.DecimalType;
-import io.trino.spi.type.Decimals;
-import io.trino.spi.type.Type;
-import io.trino.spi.type.VarbinaryType;
-import io.trino.spi.type.VarcharType;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Types;
-import java.time.LocalDate;
-import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
-import java.time.temporal.ChronoField;
-import java.util.Map;
-import java.util.Optional;
-import java.util.OptionalInt;
-import java.util.OptionalLong;
-import java.util.function.BiFunction;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Throwables.throwIfInstanceOf;
-import static com.google.common.base.Verify.verify;
-import static io.trino.plugin.jdbc.ColumnMapping.doubleMapping;
-import static io.trino.plugin.jdbc.DecimalConfig.DecimalMapping.ALLOW_OVERFLOW;
-import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalDefaultScale;
-import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRounding;
-import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.getDecimalRoundingMode;
-import static io.trino.plugin.jdbc.JdbcErrorCode.JDBC_ERROR;
-import static io.trino.plugin.jdbc.PredicatePushdownController.DISABLE_PUSHDOWN;
-import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.booleanColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.booleanWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.charColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.charWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.varbinaryWriteFunction;
-import static io.trino.plugin.jdbc.StandardColumnMappings.varcharColumnMapping;
-import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction;
-import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling;
-import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR;
-import static io.trino.plugin.vertica.VerticaTableStatisticsReader.readTableStatistics;
-import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
-import static io.trino.spi.connector.JoinCondition.Operator.IDENTICAL;
-import static io.trino.spi.type.BigintType.BIGINT;
-import static io.trino.spi.type.BooleanType.BOOLEAN;
-import static io.trino.spi.type.CharType.createCharType;
-import static io.trino.spi.type.DateType.DATE;
-import static io.trino.spi.type.DecimalType.createDecimalType;
-import static io.trino.spi.type.DoubleType.DOUBLE;
-import static io.trino.spi.type.IntegerType.INTEGER;
-import static io.trino.spi.type.RealType.REAL;
-import static io.trino.spi.type.SmallintType.SMALLINT;
-import static io.trino.spi.type.TinyintType.TINYINT;
-import static io.trino.spi.type.VarcharType.createVarcharType;
-import static java.lang.Math.max;
-import static java.lang.Math.min;
-import static java.lang.String.format;
-import static java.util.Locale.ENGLISH;
-import static java.util.Objects.requireNonNull;
-
-public class VerticaClient
- extends BaseJdbcClient
-{
- // Date format is different between read and write in Vertica
- private static final DateTimeFormatter DATE_READ_FORMATTER = DateTimeFormatter.ofPattern("u-MM-dd");
- private static final DateTimeFormatter DATE_WRITE_FORMATTER = new DateTimeFormatterBuilder()
- .appendValueReduced(ChronoField.YEAR, 4, 7, 1000)
- .appendPattern("-MM-dd[ G]")
- .toFormatter();
-
- private final boolean statisticsEnabled;
- private final ConnectorExpressionRewriter<ParameterizedExpression> connectorExpressionRewriter;
-
- @Inject
- public VerticaClient(
- BaseJdbcConfig config,
- JdbcStatisticsConfig statisticsConfig,
- ConnectionFactory connectionFactory,
- QueryBuilder queryBuilder,
- IdentifierMapping identifierMapping,
- RemoteQueryModifier queryModifier)
- {
- super("\"", connectionFactory, queryBuilder, config.getJdbcTypesMappedToVarchar(), identifierMapping, queryModifier, false);
- this.statisticsEnabled = requireNonNull(statisticsConfig, "statisticsConfig is null").isEnabled();
- this.connectorExpressionRewriter = JdbcConnectorExpressionRewriterBuilder.newBuilder()
- .addStandardRules(this::quoted)
- .withTypeClass("supported_type", ImmutableSet.of("tinyint", "smallint", "integer", "bigint", "decimal", "real", "char", "varchar"))
- .map("$equal(left: supported_type, right: supported_type)").to("left = right")
- .map("$not_equal(left: supported_type, right: supported_type)").to("left <> right")
- .map("$less_than(left: supported_type, right: supported_type)").to("left < right")
- .map("$less_than_or_equal(left: supported_type, right: supported_type)").to("left <= right")
- .map("$greater_than(left: supported_type, right: supported_type)").to("left > right")
- .map("$greater_than_or_equal(left: supported_type, right: supported_type)").to("left >= right")
- .build();
- }
-
- @Override
- public Optional<String> getTableComment(ResultSet resultSet)
- {
- // Don't return a comment until the connector supports creating tables with comment
- return Optional.empty();
- }
-
- @Override
- public TableStatistics getTableStatistics(ConnectorSession session, JdbcTableHandle handle)
- {
- if (!statisticsEnabled) {
- return TableStatistics.empty();
- }
- if (!handle.isNamedRelation()) {
- return TableStatistics.empty();
- }
- try (Connection connection = connectionFactory.openConnection(session)) {
- return readTableStatistics(connection, handle, () -> JdbcMetadata.getColumns(session, this, handle));
- }
- catch (SQLException | RuntimeException e) {
- throwIfInstanceOf(e, TrinoException.class);
- throw new TrinoException(JDBC_ERROR, "Failed fetching statistics for table: " + handle, e);
- }
- }
-
- @Override
- public PreparedStatement getPreparedStatement(Connection connection, String sql, Optional<Integer> columnCount)
- throws SQLException
- {
- connection.setAutoCommit(false);
- PreparedStatement statement = connection.prepareStatement(sql);
- statement.setFetchSize(1000);
- return statement;
- }
-
- @Override
- public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
- {
- Optional<ColumnMapping> mappingToVarchar = getForcedMappingToVarchar(typeHandle);
- if (mappingToVarchar.isPresent()) {
- return mappingToVarchar;
- }
-
- switch (typeHandle.jdbcType()) {
- case Types.BIT:
- case Types.BOOLEAN:
- return Optional.of(booleanColumnMapping());
- // Vertica's integer type is a 64-bit type for all tiny/small/int/bigint data
- // Vertica does not support the JDBC TINYINT/SMALLINT/INTEGER types, only BIGINT
- case Types.TINYINT:
- case Types.SMALLINT:
- case Types.INTEGER:
- case Types.BIGINT:
- return Optional.of(bigintColumnMapping());
- case Types.DOUBLE:
- case Types.FLOAT:
- case Types.REAL:
- // Disabling pushdown - Vertica is dropping/rounding precision for these types
- return Optional.of(doubleMapping(DOUBLE, ResultSet::getDouble, doubleWriteFunction(), DISABLE_PUSHDOWN));
- case Types.NUMERIC:
- int decimalDigits = typeHandle.requiredDecimalDigits();
- int precision = typeHandle.requiredColumnSize() + max(-decimalDigits, 0);
- if (getDecimalRounding(session) == ALLOW_OVERFLOW && precision > Decimals.MAX_PRECISION) {
- int scale = min(decimalDigits, getDecimalDefaultScale(session));
- return Optional.of(decimalColumnMapping(createDecimalType(Decimals.MAX_PRECISION, scale), getDecimalRoundingMode(session)));
- }
- if (precision > Decimals.MAX_PRECISION) {
- break;
- }
- return Optional.of(decimalColumnMapping(createDecimalType(precision, max(decimalDigits, 0))));
- case Types.CHAR:
- return Optional.of(charColumnMapping(createCharType(typeHandle.requiredColumnSize()), true));
- case Types.VARCHAR:
- case Types.LONGVARCHAR:
- return Optional.of(varcharColumnMapping(createVarcharType(typeHandle.requiredColumnSize()), true));
- case Types.VARBINARY:
- case Types.LONGVARBINARY:
- return Optional.of(varbinaryColumnMapping());
- case Types.DATE:
- return Optional.of(ColumnMapping.longMapping(
- DATE,
- (resultSet, index) -> LocalDate.parse(resultSet.getString(index), DATE_READ_FORMATTER).toEpochDay(),
- dateWriteFunctionUsingString()));
- }
-
- if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) {
- return mapToUnboundedVarchar(typeHandle);
- }
-
- return Optional.empty();
- }
-
- @Override
- public WriteMapping toWriteMapping(ConnectorSession session, Type type)
- {
- if (type == BOOLEAN) {
- return WriteMapping.booleanMapping("boolean", booleanWriteFunction());
- }
-
- // Vertica's integer type is a 64-bit type for all tiny/small/int/bigint data
- // Vertica does not support the JDBC TINYINT/SMALLINT/INTEGER types, only BIGINT
- if (type == TINYINT || type == SMALLINT || type == INTEGER || type == BIGINT) {
- return WriteMapping.longMapping("bigint", bigintWriteFunction());
- }
-
- if (type == REAL) {
- return WriteMapping.longMapping("real", realWriteFunction());
- }
-
- if (type == DOUBLE) {
- return WriteMapping.doubleMapping("double precision", doubleWriteFunction());
- }
-
- if (type instanceof DecimalType decimalType) {
- String dataType = format("decimal(%s, %s)", decimalType.getPrecision(), decimalType.getScale());
- if (decimalType.isShort()) {
- return WriteMapping.longMapping(dataType, shortDecimalWriteFunction(decimalType));
- }
- return WriteMapping.objectMapping(dataType, longDecimalWriteFunction(decimalType));
- }
-
- if (type instanceof CharType charType) {
- // TODO Handle cases where the value has multi-byte characters
- // e.g. a CHAR(1) with value U+1F600 (3 bytes) will not fit in a CHAR(1) in Vertica.
- // Trino counts codepoints/characters while Vertica counts bytes/octets.
- int length = charType.getLength();
- checkArgument(length <= 65000, "Char length is greater than 65,000");
- return WriteMapping.sliceMapping("char(" + length + ")", charWriteFunction());
- }
-
- if (type instanceof VarcharType varcharType) {
- // TODO Handle cases where the value has multi-byte characters
- // e.g. a VARCHAR(1) with value U+1F600 (3 bytes) will not fit in a VARCHAR(1) in Vertica.
- // Trino counts codepoints/characters while Vertica counts bytes/octets.
-
- // Prefer VARCHAR for lengths <= the maximum 65,000, else use LONG VARCHAR
- String dataType;
- if (varcharType.isUnbounded()) {
- dataType = "long varchar";
- }
- else if (varcharType.getBoundedLength() <= 65_000) {
- dataType = "varchar(" + varcharType.getBoundedLength() + ")";
- }
- else {
- checkArgument(varcharType.getBoundedLength() <= 32_000_000, "Varchar length is greater than 32,000,000");
- dataType = "long varchar(" + varcharType.getBoundedLength() + ")";
- }
-
- return WriteMapping.sliceMapping(dataType, varcharWriteFunction());
- }
-
- if (type instanceof VarbinaryType) {
- // Vertica will implicitly cast VARBINARY to LONG VARBINARY but not the other way, so we use LONG VARBINARY
- return WriteMapping.sliceMapping("long varbinary", varbinaryWriteFunction());
- }
-
- if (type == DATE) {
- return WriteMapping.longMapping("date", dateWriteFunctionUsingString());
- }
-
- throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName());
- }
-
- private static LongWriteFunction dateWriteFunctionUsingString()
- {
- return (statement, index, day) -> statement.setString(index, DATE_WRITE_FORMATTER.format(LocalDate.ofEpochDay(day)));
- }
-
- @Override
- public void dropColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column)
- {
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support dropping columns");
- }
-
- @Override
- public void setColumnType(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column, Type type)
- {
- // TODO: Remove this override. Vertica supports ALTER COLUMN ... SET DATA TYPE syntax.
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support setting column types");
- }
-
- @Override
- public OptionalLong delete(ConnectorSession session, JdbcTableHandle handle)
- {
- checkArgument(handle.isNamedRelation(), "Unable to delete from synthetic table: %s", handle);
- checkArgument(handle.getLimit().isEmpty(), "Unable to delete when limit is set: %s", handle);
- checkArgument(handle.getSortOrder().isEmpty(), "Unable to delete when sort order is set: %s", handle);
- try (Connection connection = connectionFactory.openConnection(session)) {
- verify(connection.getAutoCommit());
- PreparedQuery preparedQuery = queryBuilder.prepareDeleteQuery(this, session, connection, handle.getRequiredNamedRelation(), handle.getConstraint(), Optional.empty());
- try (PreparedStatement preparedStatement = queryBuilder.prepareStatement(this, session, connection, preparedQuery, Optional.empty())) {
- int affectedRowsCount = preparedStatement.executeUpdate();
- // Vertica requires an explicit commit, else it will discard the transaction
- connection.commit();
- return OptionalLong.of(affectedRowsCount);
- }
- }
- catch (SQLException e) {
- throw new TrinoException(JDBC_ERROR, e);
- }
- }
-
- @Override
- public OptionalLong update(ConnectorSession session, JdbcTableHandle handle)
- {
- checkArgument(handle.isNamedRelation(), "Unable to update from synthetic table: %s", handle);
- checkArgument(handle.getLimit().isEmpty(), "Unable to update when limit is set: %s", handle);
- checkArgument(handle.getSortOrder().isEmpty(), "Unable to update when sort order is set: %s", handle);
- checkArgument(!handle.getUpdateAssignments().isEmpty(), "Unable to update when update assignments are not set: %s", handle);
- try (Connection connection = connectionFactory.openConnection(session)) {
- verify(connection.getAutoCommit());
- PreparedQuery preparedQuery = queryBuilder.prepareUpdateQuery(
- this,
- session,
- connection,
- handle.getRequiredNamedRelation(),
- handle.getConstraint(),
- getAdditionalPredicate(handle.getConstraintExpressions(), Optional.empty()),
- handle.getUpdateAssignments());
- try (PreparedStatement preparedStatement = queryBuilder.prepareStatement(this, session, connection, preparedQuery, Optional.empty())) {
- int affectedRows = preparedStatement.executeUpdate();
- // Vertica requires an explicit commit, else it will discard the transaction
- connection.commit();
- return OptionalLong.of(affectedRows);
- }
- }
- catch (SQLException e) {
- throw new TrinoException(JDBC_ERROR, e);
- }
- }
-
- @Override
- protected void renameTable(ConnectorSession session, String catalogName, String schemaName, String tableName, SchemaTableName newTable)
- {
- if (!schemaName.equals(newTable.getSchemaName())) {
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming tables across schemas");
- }
-
- try (Connection connection = connectionFactory.openConnection(session)) {
- String newTableName = newTable.getTableName();
- if (connection.getMetaData().storesUpperCaseIdentifiers()) {
- newTableName = newTableName.toUpperCase(ENGLISH);
- }
- String sql = format(
- "ALTER TABLE %s RENAME TO %s",
- quoted(catalogName, schemaName, tableName),
- quoted(newTableName));
- execute(session, connection, sql);
- }
- catch (SQLException e) {
- throw new TrinoException(JDBC_ERROR, e);
- }
- }
-
- @Override
- public Optional<ParameterizedExpression> convertPredicate(ConnectorSession session, ConnectorExpression expression, Map<String, ColumnHandle> assignments)
- {
- return connectorExpressionRewriter.rewrite(session, expression, assignments);
- }
-
- @Override
- public void renameSchema(ConnectorSession session, String schemaName, String newSchemaName)
- {
- throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming schemas");
- }
-
- @Override
- protected Optional<BiFunction<String, Long, String>> limitFunction()
- {
- return Optional.of((sql, limit) -> sql + " LIMIT " + limit);
- }
-
- @Override
- public boolean isLimitGuaranteed(ConnectorSession session)
- {
- return true;
- }
-
- @Override
- protected boolean isSupportedJoinCondition(ConnectorSession session, JdbcJoinCondition joinCondition)
- {
- // Vertica does not support IS DISTINCT FROM
- return !joinCondition.getOperator().equals(IDENTICAL);
- }
-
- @Override
- public OptionalInt getMaxColumnNameLength(ConnectorSession session)
- {
- return this.getMaxColumnNameLengthFromDatabaseMetaData(session);
- }
-}
diff --git a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaClientModule.java b/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaClientModule.java
deleted file mode 100644
index 06ce49a9054a..000000000000
--- a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaClientModule.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import com.google.inject.Binder;
-import com.google.inject.Provides;
-import com.google.inject.Scopes;
-import com.google.inject.Singleton;
-import io.airlift.configuration.AbstractConfigurationAwareModule;
-import io.opentelemetry.api.OpenTelemetry;
-import io.trino.plugin.jdbc.BaseJdbcConfig;
-import io.trino.plugin.jdbc.ConnectionFactory;
-import io.trino.plugin.jdbc.DecimalModule;
-import io.trino.plugin.jdbc.DriverConnectionFactory;
-import io.trino.plugin.jdbc.ForBaseJdbc;
-import io.trino.plugin.jdbc.JdbcClient;
-import io.trino.plugin.jdbc.JdbcJoinPushdownSupportModule;
-import io.trino.plugin.jdbc.JdbcStatisticsConfig;
-import io.trino.plugin.jdbc.credential.CredentialProvider;
-import io.trino.plugin.jdbc.ptf.Query;
-import io.trino.spi.function.table.ConnectorTableFunction;
-
-import static com.google.inject.multibindings.Multibinder.newSetBinder;
-import static io.airlift.configuration.ConfigBinder.configBinder;
-
-public class VerticaClientModule
- extends AbstractConfigurationAwareModule
-{
- @Override
- protected void setup(Binder binder)
- {
- binder.bind(VerticaClient.class).in(Scopes.SINGLETON);
- binder.bind(JdbcClient.class).annotatedWith(ForBaseJdbc.class).to(VerticaClient.class).in(Scopes.SINGLETON);
- configBinder(binder).bindConfig(BaseJdbcConfig.class);
- configBinder(binder).bindConfig(JdbcStatisticsConfig.class);
- configBinder(binder).bindConfigDefaults(JdbcStatisticsConfig.class, config -> {
- // Disabled by default because the user must be superuser to run EXPORT_STATISTICS function in Vertica
- config.setEnabled(false);
- });
-
- binder.install(new DecimalModule());
- install(new JdbcJoinPushdownSupportModule());
-
- @SuppressWarnings("TrinoExperimentalSpi")
- Class<ConnectorTableFunction> clazz = ConnectorTableFunction.class;
- newSetBinder(binder, clazz).addBinding().toProvider(Query.class).in(Scopes.SINGLETON);
- }
-
- @Provides
- @Singleton
- @ForBaseJdbc
- public static ConnectionFactory createConnectionFactory(BaseJdbcConfig config, CredentialProvider credentialProvider, OpenTelemetry openTelemetry)
- {
- return DriverConnectionFactory.builder(new VerticaDriver(), config.getConnectionUrl(), credentialProvider)
- .setOpenTelemetry(openTelemetry)
- .build();
- }
-}
diff --git a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaDriver.java b/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaDriver.java
deleted file mode 100644
index 4f689b3418fd..000000000000
--- a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaDriver.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import com.vertica.core.VConnectionPropertyKey;
-import com.vertica.core.VDriver;
-import com.vertica.jdbc.hybrid.HybridAbstractDriver;
-import com.vertica.utilities.JDBCVersion;
-
-import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
-import java.util.Properties;
-import java.util.logging.Logger;
-
-/**
- * Original Driver class has broken JDK version detection that throws during initialization.
- * This version works correctly on JDK 17 and beyond.
- *
- * See: com.vertica.jdbc.Driver.runningJDBCVersion
- */
-public class VerticaDriver
- extends HybridAbstractDriver
-{
- @Override
- protected JDBCVersion getJDBCVersion()
- {
- return JDBCVersion.JDBC42;
- }
-
- @Override
- protected String getSubProtocol()
- {
- return "vertica";
- }
-
- @Override
- protected boolean parseSubName(String subName, Properties properties)
- {
- return VConnectionPropertyKey.parseSubName(subName, properties);
- }
-
- @Override
- public Logger getParentLogger()
- throws SQLFeatureNotSupportedException
- {
- throw new SQLFeatureNotSupportedException("java.util.logging not used");
- }
-
- static {
- try {
- HybridAbstractDriver.initialize(new VerticaDriver(), VDriver.class.getName());
- HybridAbstractDriver.setErrorMessageComponentName("Vertica");
- }
- catch (SQLException _) {
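- // ignored: if driver registration fails here, the error surfaces when a connection is first attempted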
- }
- }
-}
diff --git a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaPlugin.java b/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaPlugin.java
deleted file mode 100644
index 9987e3855522..000000000000
--- a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaPlugin.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import io.trino.plugin.jdbc.JdbcPlugin;
-
-public class VerticaPlugin
- extends JdbcPlugin
-{
- public VerticaPlugin()
- {
- super("vertica", VerticaClientModule::new);
- }
-}
diff --git a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaTableStatisticsReader.java b/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaTableStatisticsReader.java
deleted file mode 100644
index bb65217fd0b9..000000000000
--- a/plugin/trino-vertica/src/main/java/io/trino/plugin/vertica/VerticaTableStatisticsReader.java
+++ /dev/null
@@ -1,312 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import com.fasterxml.jackson.annotation.JsonAlias;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.dataformat.xml.JacksonXmlModule;
-import com.fasterxml.jackson.dataformat.xml.XmlMapper;
-import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
-import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
-import io.airlift.log.Logger;
-import io.trino.plugin.jdbc.JdbcColumnHandle;
-import io.trino.plugin.jdbc.JdbcTableHandle;
-import io.trino.plugin.jdbc.RemoteTableName;
-import io.trino.spi.statistics.DoubleRange;
-import io.trino.spi.statistics.Estimate;
-import io.trino.spi.statistics.TableStatistics;
-import io.trino.spi.type.BigintType;
-import io.trino.spi.type.DecimalType;
-import io.trino.spi.type.Type;
-import org.jdbi.v3.core.Handle;
-import org.jdbi.v3.core.Jdbi;
-
-import java.sql.Connection;
-import java.util.AbstractMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.function.Supplier;
-import java.util.stream.Collectors;
-
-import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES;
-import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.collect.ImmutableMap.toImmutableMap;
-import static com.google.common.collect.Iterables.getOnlyElement;
-import static io.trino.spi.type.DoubleType.DOUBLE;
-import static java.lang.String.format;
-import static java.util.Objects.requireNonNull;
-
-public final class VerticaTableStatisticsReader
-{
- private static final Logger log = Logger.get(VerticaTableStatisticsReader.class);
-
- private static final ObjectMapper OBJECT_MAPPER = new XmlMapper(new JacksonXmlModule())
- .disable(FAIL_ON_IGNORED_PROPERTIES, FAIL_ON_UNKNOWN_PROPERTIES);
-
- // We cannot determine the null fraction when the rows contain only NaN and NULL values, but having no null fraction would make the CBO useless. Assume an arbitrary value.
- private static final Estimate UNKNOWN_NULL_FRACTION_REPLACEMENT = Estimate.of(0.1);
-
- private VerticaTableStatisticsReader() {}
-
- public static TableStatistics readTableStatistics(Connection connection, JdbcTableHandle table, Supplier<List<JdbcColumnHandle>> columnSupplier)
- {
- checkArgument(table.isNamedRelation(), "Relation is not a table: %s", table);
-
- log.debug("Reading statistics for %s", table);
- try (Handle handle = Jdbi.open(connection)) {
- StatisticsDao statisticsDao = new StatisticsDao(handle);
-
- Long rowCount = statisticsDao.getRowCount(table);
- log.debug("Estimated row count of table %s is %s", table, rowCount);
-
- if (rowCount == null) {
- // Table not found, or is a view.
- return TableStatistics.empty();
- }
-
- TableStatistics.Builder tableStatistics = TableStatistics.builder();
- tableStatistics.setRowCount(Estimate.of(rowCount));
-
- Schema schema = statisticsDao.getSchemaStatistics(table);
- if (schema == null || schema.tables().size() == 0) {
- return TableStatistics.empty();
- }
- Map<String, ColumnStatistics> columnsStatistics = getOnlyElement(schema.tables()).columns().stream()
- .collect(toImmutableMap(Column::columnName, Column::stats));
- Map<String, Long> columnsDataSize = statisticsDao.getColumnDataSize(table);
-
- for (JdbcColumnHandle column : columnSupplier.get()) {
- io.trino.spi.statistics.ColumnStatistics.Builder columnStatisticsBuilder = io.trino.spi.statistics.ColumnStatistics.builder();
-
- String columnName = column.getColumnName();
-
- if (columnsDataSize.containsKey(columnName)) {
- columnStatisticsBuilder.setDataSize(Estimate.of(columnsDataSize.get(columnName)));
- }
-
- ColumnStatistics columnStatistics = columnsStatistics.get(columnName);
- if (columnStatistics != null) {
- log.debug("Reading column statistics for %s, %s from index statistics: %s", table, columnName, columnStatistics);
-
- Optional<Long> nullsCount = columnStatistics.histogram().category().stream()
- .filter(category -> category.bound().isNull())
- .map(category -> category.count().value())
- .findFirst();
-
- long distinctValuesCount = columnStatistics.distinct().value;
- // Vertica includes NULL values in the distinct values count
- if (nullsCount.isPresent() || columnStatistics.minValue().isNull() || columnStatistics.maxValue().isNull()) {
- distinctValuesCount = Math.max(columnStatistics.distinct().value - 1, 0);
- }
-
- columnStatisticsBuilder.setDistinctValuesCount(Estimate.of(distinctValuesCount));
- if (isNumeric(column.getColumnType())) {
- columnStatisticsBuilder.setRange(createNumericRange(
- columnStatistics.minValue().getValue(),
- columnStatistics.maxValue().getValue()));
- }
-
- columnStatisticsBuilder.setNullsFraction(getNullsFraction(rowCount, nullsCount, columnStatistics));
- }
-
- tableStatistics.setColumnStatistics(column, columnStatisticsBuilder.build());
- }
-
- tableStatistics.setRowCount(Estimate.of(rowCount));
- return tableStatistics.build();
- }
- }
-
- private static Estimate getNullsFraction(Long rowCount, Optional<Long> nullsCount, ColumnStatistics columnStatistics)
- {
- if (nullsCount.isPresent()) {
- return Estimate.of(Math.min(1, (double) nullsCount.get() / rowCount));
- }
- else if (columnStatistics.distinct().value == 2 && (columnStatistics.minValue().isNan() || columnStatistics.maxValue().isNan())) {
- // We can't determine nulls fraction when the rows are only nan and null because the exported XML doesn't distinguish nan and null count
- return UNKNOWN_NULL_FRACTION_REPLACEMENT;
- }
- else if (columnStatistics.minValue().isNull() || columnStatistics.maxValue().isNull()) {
- long nonNullCount = columnStatistics.histogram().category().stream()
- .mapToLong(category -> category.count().value())
- .sum();
- return Estimate.of(((double) rowCount - nonNullCount) / rowCount);
- }
- return Estimate.zero();
- }
-
- private static boolean isNumeric(Type type)
- {
- // TINYINT, SMALLINT, and INTEGER are mapped to BIGINT in Trino
- // REAL in Vertica is mapped to DOUBLE in Trino
- return type == BigintType.BIGINT || type == DOUBLE || type instanceof DecimalType;
- }
-
- private static Optional<DoubleRange> createNumericRange(Optional<String> minValue, Optional<String> maxValue)
- {
- if (minValue.isEmpty() && maxValue.isEmpty()) {
- return Optional.empty();
- }
-
- return Optional.of(new DoubleRange(
- minValue
- .filter(value -> !value.equals("-nan")) // Vertica returns -nan (=NaN) as the minimum value, but Trino doesn't support the value in statistics
- .flatMap(VerticaTableStatisticsReader::tryParseDouble)
- .orElse(Double.NEGATIVE_INFINITY),
- maxValue
- .filter(value -> !value.equals("nan")) // Vertica returns nan (=NaN) as the maximum value, but Trino doesn't support the value in statistics
- .flatMap(VerticaTableStatisticsReader::tryParseDouble)
- .orElse(Double.POSITIVE_INFINITY)));
- }
-
- private static Optional<Double> tryParseDouble(String value)
- {
- try {
- return Optional.of(Double.valueOf(value));
- }
- catch (NumberFormatException e) {
- return Optional.empty();
- }
- }
-
- private static class StatisticsDao
- {
- private final Handle handle;
-
- public StatisticsDao(Handle handle)
- {
- this.handle = requireNonNull(handle, "handle is null");
- }
-
- Long getRowCount(JdbcTableHandle table)
- {
- RemoteTableName remoteTableName = table.getRequiredNamedRelation().getRemoteTableName();
- return handle.createQuery("" +
- "SELECT row_count FROM v_monitor.projection_storage " +
- "WHERE anchor_table_schema = :schema AND anchor_table_name = :table_name")
- .bind("schema", remoteTableName.getCatalogName().orElse(null))
- .bind("table_name", remoteTableName.getTableName())
- .mapTo(Long.class)
- .findOne()
- .orElse(null);
- }
-
- Schema getSchemaStatistics(JdbcTableHandle table)
- {
- RemoteTableName remoteTableName = table.getRequiredNamedRelation().getRemoteTableName();
- // The empty '' returns XML to standard output
- return handle.createQuery("SELECT EXPORT_STATISTICS('', :schema_table_name)")
- .bind("schema_table_name", format("%s.%s", remoteTableName.getSchemaName().orElse(null), remoteTableName.getTableName()))
- .map((rs, ctx) -> {
- try {
- String exportStatistics = rs.getString("EXPORT_STATISTICS");
- return OBJECT_MAPPER.readValue(exportStatistics, Schema.class);
- }
- catch (JsonProcessingException e) {
- log.warn(e, "Failed to read statistics");
- return null;
- }
- })
- .one();
- }
-
- Map<String, Long> getColumnDataSize(JdbcTableHandle table)
- {
- RemoteTableName remoteTableName = table.getRequiredNamedRelation().getRemoteTableName();
- return handle.createQuery("" +
- "SELECT column_name, SUM(used_bytes) AS size FROM v_monitor.column_storage " +
- "WHERE anchor_table_schema = :schema AND anchor_table_name = :table_name " +
- "GROUP BY column_name")
- .bind("schema", remoteTableName.getCatalogName().orElse(null))
- .bind("table_name", remoteTableName.getTableName())
- .map((rs, ctx) -> new AbstractMap.SimpleEntry<>(rs.getString("column_name"), rs.getLong("size")))
- .stream()
- .collect(Collectors.toMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue));
- }
- }
-
- public record Schema(@JacksonXmlProperty(localName = "tables") List<Table> tables) {}
-
- public record Table(
- @JacksonXmlProperty(localName = "schema") String schemaName,
- @JacksonXmlProperty(localName = "name") String tableName,
- @JacksonXmlProperty(localName = "columns") List columns) {}
-
- public record Column(
- @JacksonXmlProperty(localName = "name") String columnName,
- @JacksonXmlProperty(localName = "dataType") String dataType,
- @JsonProperty("intStats") @JsonAlias({"stringStats", "floatStats", "numericStats"}) ColumnStatistics stats) {}
-
- public record ColumnStatistics(
- @JacksonXmlProperty(localName = "distinct") NumericValue distinct,
- @JacksonXmlProperty(localName = "minValue") NullableValue minValue,
- @JacksonXmlProperty(localName = "maxValue") NullableValue maxValue,
- @JacksonXmlProperty(localName = "histogram") Histogram histogram) {}
-
- public record Histogram(@JacksonXmlElementWrapper(useWrapping = false) @JacksonXmlProperty(localName = "category") List<Category> category)
- {
- public record Category(
- @JacksonXmlProperty(localName = "bound") NullableValue bound,
- @JacksonXmlProperty(localName = "count") NumericValue count) {}
- }
-
- public static class NullableValue
- {
- private final String value;
- private final String nullValue;
-
- @JsonCreator
- public NullableValue(String value)
- {
- this(null, value);
- }
-
- @JsonCreator
- public NullableValue(
- @JacksonXmlProperty(localName = "nullValue", isAttribute = true) String nullValue,
- @JacksonXmlProperty(localName = " ") String value)
- {
- this.value = value;
- this.nullValue = nullValue;
- }
-
- public Optional<String> getValue()
- {
- return Optional.ofNullable(value);
- }
-
- public boolean isNull()
- {
- if (nullValue == null) {
- return false;
- }
- return nullValue.equals("true");
- }
-
- public boolean isNan()
- {
- if (value == null) {
- return false;
- }
- return value.equals("-nan") || value.equals("nan");
- }
- }
-
- public record NumericValue(@JacksonXmlProperty(localName = "value", isAttribute = true) long value) {}
-}
diff --git a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/BaseVerticaConnectorSmokeTest.java b/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/BaseVerticaConnectorSmokeTest.java
deleted file mode 100644
index 25df780431bd..000000000000
--- a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/BaseVerticaConnectorSmokeTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import io.trino.plugin.jdbc.BaseJdbcConnectorSmokeTest;
-import io.trino.testing.TestingConnectorBehavior;
-
-public abstract class BaseVerticaConnectorSmokeTest
- extends BaseJdbcConnectorSmokeTest
-{
- @Override
- protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
- {
- return switch (connectorBehavior) {
- case SUPPORTS_RENAME_SCHEMA,
- SUPPORTS_RENAME_TABLE_ACROSS_SCHEMAS -> false;
- default -> super.hasBehavior(connectorBehavior);
- };
- }
-}
diff --git a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaConnectorSmokeTest.java b/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaConnectorSmokeTest.java
deleted file mode 100644
index 2c8e1cf511bd..000000000000
--- a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaConnectorSmokeTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import io.trino.testing.QueryRunner;
-
-import static io.trino.plugin.vertica.TestingVerticaServer.DEFAULT_VERSION;
-
-public class TestVerticaConnectorSmokeTest
- extends BaseVerticaConnectorSmokeTest
-{
- @Override
- protected QueryRunner createQueryRunner()
- throws Exception
- {
- return VerticaQueryRunner.builder(closeAfterClass(new TestingVerticaServer(DEFAULT_VERSION)))
- .setTables(REQUIRED_TPCH_TABLES)
- .build();
- }
-}
diff --git a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaConnectorTest.java b/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaConnectorTest.java
deleted file mode 100644
index fb38c381de52..000000000000
--- a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaConnectorTest.java
+++ /dev/null
@@ -1,466 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import io.trino.Session;
-import io.trino.plugin.jdbc.BaseJdbcConnectorTest;
-import io.trino.plugin.jdbc.JoinOperator;
-import io.trino.spi.connector.JoinCondition;
-import io.trino.testing.MaterializedResult;
-import io.trino.testing.QueryRunner;
-import io.trino.testing.TestingConnectorBehavior;
-import io.trino.testing.sql.SqlExecutor;
-import io.trino.testing.sql.TestTable;
-import org.junit.jupiter.api.Test;
-
-import java.util.List;
-import java.util.Optional;
-import java.util.OptionalInt;
-import java.util.stream.Stream;
-
-import static com.google.common.collect.ImmutableList.toImmutableList;
-import static com.google.common.collect.MoreCollectors.toOptional;
-import static io.trino.plugin.jdbc.JoinOperator.FULL_JOIN;
-import static io.trino.spi.type.VarcharType.VARCHAR;
-import static io.trino.testing.MaterializedResult.resultBuilder;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_AGGREGATION_PUSHDOWN;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_JOIN_PUSHDOWN;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_JOIN_PUSHDOWN_WITH_DISTINCT_FROM;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_JOIN_PUSHDOWN_WITH_FULL_JOIN;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_EQUALITY;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_INEQUALITY;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_LIMIT_PUSHDOWN;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_PREDICATE_PUSHDOWN_WITH_VARCHAR_EQUALITY;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_PREDICATE_PUSHDOWN_WITH_VARCHAR_INEQUALITY;
-import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_TOPN_PUSHDOWN;
-import static java.lang.String.format;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import static org.junit.jupiter.api.Assumptions.abort;
-
-public class TestVerticaConnectorTest
- extends BaseJdbcConnectorTest
-{
- protected TestingVerticaServer verticaServer;
-
- @Override
- protected QueryRunner createQueryRunner()
- throws Exception
- {
- verticaServer = closeAfterClass(new TestingVerticaServer());
- return VerticaQueryRunner.builder(verticaServer).setTables(REQUIRED_TPCH_TABLES).build();
- }
-
- @Override
- protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
- {
- return switch (connectorBehavior) {
- case SUPPORTS_JOIN_PUSHDOWN -> true;
- case SUPPORTS_ARRAY,
- SUPPORTS_ADD_COLUMN_WITH_COMMENT,
- SUPPORTS_AGGREGATION_PUSHDOWN,
- SUPPORTS_COMMENT_ON_COLUMN,
- SUPPORTS_COMMENT_ON_TABLE,
- SUPPORTS_CREATE_TABLE_WITH_COLUMN_COMMENT,
- SUPPORTS_CREATE_TABLE_WITH_TABLE_COMMENT,
- SUPPORTS_DROP_COLUMN,
- SUPPORTS_JOIN_PUSHDOWN_WITH_DISTINCT_FROM,
- SUPPORTS_MAP_TYPE,
- SUPPORTS_RENAME_SCHEMA,
- SUPPORTS_RENAME_TABLE_ACROSS_SCHEMAS,
- SUPPORTS_ROW_TYPE,
- SUPPORTS_SET_COLUMN_TYPE,
- SUPPORTS_TOPN_PUSHDOWN -> false;
- default -> super.hasBehavior(connectorBehavior);
- };
- }
-
- // Overridden because the base test case pushes down a predicate on a DOUBLE column;
- // DOUBLE pushdown is disabled in Vertica due to precision issues
- @Test
- @Override
- public void testJoinPushdown()
- {
- for (JoinOperator joinOperator : JoinOperator.values()) {
- testJoinPushdown(joinOperator);
- }
- }
-
- private void testJoinPushdown(JoinOperator joinOperator)
- {
- Session session = joinPushdownEnabled(getSession());
-
- if (!hasBehavior(SUPPORTS_JOIN_PUSHDOWN)) {
- assertThat(query(session, "SELECT r.name, n.name FROM nation n JOIN region r ON n.regionkey = r.regionkey"))
- .joinIsNotFullyPushedDown();
- return;
- }
-
- if (joinOperator == FULL_JOIN && !hasBehavior(SUPPORTS_JOIN_PUSHDOWN_WITH_FULL_JOIN)) {
- // Covered by verifySupportsJoinPushdownWithFullJoinDeclaration
- return;
- }
-
- // Disable DF here for the sake of the negative test cases' expected plan. With DF enabled, some operators end up in DF's FilterNode and some do not.
- Session withoutDynamicFiltering = Session.builder(session)
- .setSystemProperty("enable_dynamic_filtering", "false")
- .build();
-
- String notDistinctOperator = "IS NOT DISTINCT FROM";
- List<String> nonEqualities = Stream.concat(
- Stream.of(JoinCondition.Operator.values())
- .filter(operator -> operator != JoinCondition.Operator.EQUAL && operator != JoinCondition.Operator.IDENTICAL)
- .map(JoinCondition.Operator::getValue),
- Stream.of(notDistinctOperator))
- .collect(toImmutableList());
-
- try (TestTable nationLowercaseTable = newTrinoTable(
- // If a connector supports Join pushdown, but does not allow CTAS, we need to make the table creation here overridable.
- "nation_lowercase",
- "AS SELECT nationkey, lower(name) name, regionkey FROM nation")) {
- // basic case
- assertThat(query(session, format("SELECT r.name, n.name FROM nation n %s region r ON n.regionkey = r.regionkey", joinOperator))).isFullyPushedDown();
-
- // join over different columns
- assertThat(query(session, format("SELECT r.name, n.name FROM nation n %s region r ON n.nationkey = r.regionkey", joinOperator))).isFullyPushedDown();
-
- // pushdown when using USING
- assertThat(query(session, format("SELECT r.name, n.name FROM nation n %s region r USING(regionkey)", joinOperator))).isFullyPushedDown();
-
- // varchar equality predicate
- assertJoinConditionallyPushedDown(
- session,
- format("SELECT n.name, n2.regionkey FROM nation n %s nation n2 ON n.name = n2.name", joinOperator),
- hasBehavior(SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_EQUALITY));
- assertJoinConditionallyPushedDown(
- session,
- format("SELECT n.name, nl.regionkey FROM nation n %s %s nl ON n.name = nl.name", joinOperator, nationLowercaseTable.getName()),
- hasBehavior(SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_EQUALITY));
-
- // multiple bigint predicates
- assertThat(query(session, format("SELECT n.name, c.name FROM nation n %s customer c ON n.nationkey = c.nationkey and n.regionkey = c.custkey", joinOperator)))
- .isFullyPushedDown();
-
- // inequality
- for (String operator : nonEqualities) {
- // bigint inequality predicate
- assertJoinConditionallyPushedDown(
- withoutDynamicFiltering,
- format("SELECT r.name, n.name FROM nation n %s region r ON n.regionkey %s r.regionkey", joinOperator, operator),
- expectJoinPushdown(operator) && expectJoinPushdownOnInequalityOperator(joinOperator));
-
- // varchar inequality predicate
- assertJoinConditionallyPushedDown(
- withoutDynamicFiltering,
- format("SELECT n.name, nl.name FROM nation n %s %s nl ON n.name %s nl.name", joinOperator, nationLowercaseTable.getName(), operator),
- expectVarcharJoinPushdown(operator) && expectJoinPushdownOnInequalityOperator(joinOperator));
- }
-
- // inequality along with an equality, which constitutes an equi-condition and allows filter to remain as part of the Join
- for (String operator : nonEqualities) {
- assertJoinConditionallyPushedDown(
- session,
- format("SELECT n.name, c.name FROM nation n %s customer c ON n.nationkey = c.nationkey AND n.regionkey %s c.custkey", joinOperator, operator),
- expectJoinPushdown(operator));
- }
-
- // varchar inequality along with an equality, which constitutes an equi-condition and allows filter to remain as part of the Join
- for (String operator : nonEqualities) {
- assertJoinConditionallyPushedDown(
- session,
- format("SELECT n.name, nl.name FROM nation n %s %s nl ON n.regionkey = nl.regionkey AND n.name %s nl.name", joinOperator, nationLowercaseTable.getName(), operator),
- expectVarcharJoinPushdown(operator));
- }
-
- // Join over a (double) predicate
- /*
- assertThat(query(session, format("" +
- "SELECT c.name, n.name " +
- "FROM (SELECT * FROM customer WHERE acctbal > 8000) c " +
- "%s nation n ON c.custkey = n.nationkey", joinOperator)))
- .isFullyPushedDown();
- */
-
- // Join over a varchar equality predicate
- assertJoinConditionallyPushedDown(
- session,
- format("SELECT c.name, n.name FROM (SELECT * FROM customer WHERE address = 'TcGe5gaZNgVePxU5kRrvXBfkasDTea') c " +
- "%s nation n ON c.custkey = n.nationkey", joinOperator),
- hasBehavior(SUPPORTS_PREDICATE_PUSHDOWN_WITH_VARCHAR_EQUALITY));
-
- // Join over a varchar inequality predicate
- assertJoinConditionallyPushedDown(
- session,
- format("SELECT c.name, n.name FROM (SELECT * FROM customer WHERE address < 'TcGe5gaZNgVePxU5kRrvXBfkasDTea') c " +
- "%s nation n ON c.custkey = n.nationkey", joinOperator),
- hasBehavior(SUPPORTS_PREDICATE_PUSHDOWN_WITH_VARCHAR_INEQUALITY));
-
- // join over aggregation
- assertJoinConditionallyPushedDown(
- session,
- format("SELECT * FROM (SELECT regionkey rk, count(nationkey) c FROM nation GROUP BY regionkey) n " +
- "%s region r ON n.rk = r.regionkey", joinOperator),
- hasBehavior(SUPPORTS_AGGREGATION_PUSHDOWN));
-
- // join over LIMIT
- assertJoinConditionallyPushedDown(
- session,
- format("SELECT * FROM (SELECT nationkey FROM nation LIMIT 30) n " +
- "%s region r ON n.nationkey = r.regionkey", joinOperator),
- hasBehavior(SUPPORTS_LIMIT_PUSHDOWN));
-
- // join over TopN
- assertJoinConditionallyPushedDown(
- session,
- format("SELECT * FROM (SELECT nationkey FROM nation ORDER BY regionkey LIMIT 5) n " +
- "%s region r ON n.nationkey = r.regionkey", joinOperator),
- hasBehavior(SUPPORTS_TOPN_PUSHDOWN));
-
- // join over join
- assertThat(query(session, "SELECT * FROM nation n, region r, customer c WHERE n.regionkey = r.regionkey AND r.regionkey = c.custkey"))
- .isFullyPushedDown();
- }
- }
-
- @Override
- protected boolean expectJoinPushdown(String operator)
- {
- if ("IS NOT DISTINCT FROM".equals(operator)) {
- // TODO (https://github.com/trinodb/trino/issues/6967) support join pushdown for IS NOT DISTINCT FROM
- return false;
- }
- return switch (toJoinConditionOperator(operator)) {
- case EQUAL, NOT_EQUAL, LESS_THAN, LESS_THAN_OR_EQUAL, GREATER_THAN, GREATER_THAN_OR_EQUAL -> true;
- case IDENTICAL -> hasBehavior(SUPPORTS_JOIN_PUSHDOWN_WITH_DISTINCT_FROM);
- };
- }
-
- private boolean expectVarcharJoinPushdown(String operator)
- {
- if ("IS NOT DISTINCT FROM".equals(operator)) {
- // TODO (https://github.com/trinodb/trino/issues/6967) support join pushdown for IS NOT DISTINCT FROM
- return false;
- }
- return switch (toJoinConditionOperator(operator)) {
- case EQUAL, NOT_EQUAL -> hasBehavior(SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_EQUALITY);
- case LESS_THAN, LESS_THAN_OR_EQUAL, GREATER_THAN, GREATER_THAN_OR_EQUAL -> hasBehavior(SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_INEQUALITY);
- case IDENTICAL -> hasBehavior(SUPPORTS_JOIN_PUSHDOWN_WITH_DISTINCT_FROM) && hasBehavior(SUPPORTS_JOIN_PUSHDOWN_WITH_VARCHAR_EQUALITY);
- };
- }
-
- private JoinCondition.Operator toJoinConditionOperator(String operator)
- {
- return Stream.of(JoinCondition.Operator.values())
- .filter(joinOperator -> joinOperator.getValue().equals(operator))
- .collect(toOptional())
- .orElseThrow(() -> new IllegalArgumentException("Not found: " + operator));
- }
-
- @Test
- @Override
- public void testCreateTableAsSelectWithUnicode()
- {
- // Trino creates a table of varchar(1); this unicode character occupies 3 octets in Vertica
- assertThatThrownBy(super::testCreateTableAsSelectWithUnicode)
- .hasStackTraceContaining("ERROR: String of 3 octets is too long for type Varchar(1) for column unicode");
- // Explicit CAST to VARCHAR(3) to make sure the value fits in Vertica's column
- assertCreateTableAsSelect(
- "SELECT CAST('\u2603' AS VARCHAR(3)) unicode",
- "SELECT 1");
- }
-
- @Override
- protected TestTable createTableWithDefaultColumns()
- {
- return new TestTable(
- this::execute,
- "tpch.table",
- "(col_required BIGINT NOT NULL," +
- "col_nullable BIGINT," +
- "col_default BIGINT DEFAULT 43," +
- "col_nonnull_default BIGINT NOT NULL DEFAULT 42," +
- "col_required2 BIGINT NOT NULL)");
- }
-
- @Override
- protected MaterializedResult getDescribeOrdersResult()
- {
- // Vertica INTEGER type is a 64-bit type
- // JDBC INTEGER is not supported, only BIGINT
- // Overridden because shippriority's column type is BIGINT from Vertica
- return resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
- .row("orderkey", "bigint", "", "")
- .row("custkey", "bigint", "", "")
- .row("orderstatus", "varchar(1)", "", "")
- .row("totalprice", "double", "", "")
- .row("orderdate", "date", "", "")
- .row("orderpriority", "varchar(15)", "", "")
- .row("clerk", "varchar(15)", "", "")
- .row("shippriority", "bigint", "", "")
- .row("comment", "varchar(79)", "", "")
- .build();
- }
-
- @Test
- @Override
- public void testShowCreateTable()
- {
- // Vertica INTEGER type is a 64-bit type
- // JDBC INTEGER is not supported, only BIGINT
- // Overridden because shippriority's column type is BIGINT from Vertica
- assertThat(computeActual("SHOW CREATE TABLE orders").getOnlyValue())
- .isEqualTo("""
- CREATE TABLE vertica.tpch.orders (
- orderkey bigint,
- custkey bigint,
- orderstatus varchar(1),
- totalprice double,
- orderdate date,
- orderpriority varchar(15),
- clerk varchar(15),
- shippriority bigint,
- comment varchar(79)
- )""");
- }
-
- @Test
- @Override
- public void testShowColumns()
- {
- // Vertica INTEGER type is a 64-bit type
- // JDBC INTEGER is not supported, only BIGINT
- // Overridden because shippriority's column type is BIGINT from Vertica
- MaterializedResult actual = computeActual("SHOW COLUMNS FROM orders");
- MaterializedResult expected = resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
- .row("orderkey", "bigint", "", "")
- .row("custkey", "bigint", "", "")
- .row("orderstatus", "varchar(1)", "", "")
- .row("totalprice", "double", "", "")
- .row("orderdate", "date", "", "")
- .row("orderpriority", "varchar(15)", "", "")
- .row("clerk", "varchar(15)", "", "")
- .row("shippriority", "bigint", "", "")
- .row("comment", "varchar(79)", "", "")
- .build();
- assertThat(actual).containsExactlyElementsOf(expected);
- }
-
- @Test
- @Override
- public void testDeleteWithLike()
- {
- assertThatThrownBy(super::testDeleteWithLike)
- .hasStackTraceContaining("TrinoException: This connector does not support modifying table rows");
- }
-
- @Test
- @Override
- public void testInsertIntoNotNullColumn()
- {
- abort("TODO Enable this test");
- }
-
- @Override
- protected OptionalInt maxSchemaNameLength()
- {
- return OptionalInt.of(128);
- }
-
- @Override
- protected void verifySchemaNameLengthFailurePermissible(Throwable e)
- {
- assertThat(e).hasMessageContaining("Maximum limit is 128 octets");
- }
-
- @Override
- protected OptionalInt maxTableNameLength()
- {
- return OptionalInt.of(128);
- }
-
- @Override
- protected void verifyTableNameLengthFailurePermissible(Throwable e)
- {
- assertThat(e).hasMessageContaining("Maximum limit is 128 octets");
- }
-
- @Override
- protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup dataMappingTestSetup)
- {
- String typeName = dataMappingTestSetup.getTrinoTypeName();
- if (typeName.equals("date")) {
- // Vertica adds 10 days to dates that fall in the Julian-Gregorian switch gap
- if (dataMappingTestSetup.getSampleValueLiteral().equals("DATE '1582-10-05'")) {
- return Optional.empty();
- }
- return Optional.of(dataMappingTestSetup);
- }
-
- if (typeName.equals("time")
- || typeName.equals("time(6)")
- || typeName.equals("timestamp")
- || typeName.equals("timestamp(6)")
- || typeName.equals("timestamp(3) with time zone")
- || typeName.equals("timestamp(6) with time zone")) {
- return Optional.of(dataMappingTestSetup.asUnsupported());
- }
-
- if (typeName.equals("real")
- || typeName.equals("double")) {
- // Vertica is not returning a value as precise as the inputs which causes the smoke tests to fail for these types
- // e.g. REAL '999999.999' is written but 1000000 is read
- // e.g. DOUBLE '1234567890123.123' is written but 1234567890123.12 is read
- return Optional.empty();
- }
-
- return Optional.of(dataMappingTestSetup);
- }
-
- @Override
- protected String errorMessageForInsertIntoNotNullColumn(String columnName)
- {
- return format(".*Cannot set a NOT NULL column \\(%s\\) to a NULL value in INSERT/UPDATE statement", columnName);
- }
-
- @Override
- protected void verifyAddNotNullColumnToNonEmptyTableFailurePermissible(Throwable e)
- {
- assertThat(e).hasMessageMatching(
- "\\[Vertica]\\[VJDBC]\\(2505\\) ROLLBACK: Cannot set column \"b_varchar\" in table " +
- "\"tpch.test_add_nn.*\" to NOT NULL since it contains null values");
- }
-
- @Override
- protected OptionalInt maxColumnNameLength()
- {
- return OptionalInt.of(128);
- }
-
- @Override
- protected void verifyColumnNameLengthFailurePermissible(Throwable e)
- {
- assertThat(e).hasMessageContaining("Maximum limit is 128 octets");
- }
-
- @Override
- protected SqlExecutor onRemoteDatabase()
- {
- return verticaServer.getSqlExecutor();
- }
-
- private void execute(String sql)
- {
- verticaServer.execute(sql);
- }
-}
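The override of testJoinPushdown above follows the behavior-gating pattern that BaseJdbcConnectorTest is built around: assertions branch on declared TestingConnectorBehavior flags instead of hard-coding per-connector expectations. A minimal sketch of the pattern, reusing names from the deleted test (the query text is illustrative):

    Session session = joinPushdownEnabled(getSession());
    String sql = "SELECT r.name, n.name FROM nation n JOIN region r ON n.regionkey = r.regionkey";
    if (hasBehavior(SUPPORTS_JOIN_PUSHDOWN)) {
        // the join is expected to execute entirely in the remote database
        assertThat(query(session, sql)).isFullyPushedDown();
    }
    else {
        // at least part of the join is expected to remain in Trino
        assertThat(query(session, sql)).joinIsNotFullyPushedDown();
    }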
diff --git a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaLatestConnectorSmokeTest.java b/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaLatestConnectorSmokeTest.java
deleted file mode 100644
index e95216217804..000000000000
--- a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaLatestConnectorSmokeTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import io.trino.testing.QueryRunner;
-
-import static io.trino.plugin.vertica.TestingVerticaServer.LATEST_VERSION;
-
-public class TestVerticaLatestConnectorSmokeTest
- extends BaseVerticaConnectorSmokeTest
-{
- @Override
- protected QueryRunner createQueryRunner()
- throws Exception
- {
- return VerticaQueryRunner.builder(closeAfterClass(new TestingVerticaServer(LATEST_VERSION)))
- .setTables(REQUIRED_TPCH_TABLES)
- .build();
- }
-}
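This class and TestVerticaConnectorSmokeTest above are identical apart from the version constant handed to TestingVerticaServer, so both the oldest supported and the latest Vertica images were exercised. A hypothetical consolidation sketch (not how the deleted code was organized; it assumes the version constants are of a type the TestingVerticaServer constructor accepts):

    public abstract class BaseVersionedVerticaSmokeTest
            extends BaseVerticaConnectorSmokeTest
    {
        // subclasses supply DEFAULT_VERSION or LATEST_VERSION
        protected abstract String verticaVersion();

        @Override
        protected QueryRunner createQueryRunner()
                throws Exception
        {
            // closeAfterClass ties the container lifecycle to the test class
            return VerticaQueryRunner.builder(closeAfterClass(new TestingVerticaServer(verticaVersion())))
                    .setTables(REQUIRED_TPCH_TABLES)
                    .build();
        }
    }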
diff --git a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaPlugin.java b/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaPlugin.java
deleted file mode 100644
index d71c164a0de9..000000000000
--- a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaPlugin.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import com.google.common.collect.ImmutableMap;
-import io.trino.spi.Plugin;
-import io.trino.spi.connector.ConnectorFactory;
-import io.trino.testing.TestingConnectorContext;
-import org.junit.jupiter.api.Test;
-
-import static com.google.common.collect.Iterables.getOnlyElement;
-
-public class TestVerticaPlugin
-{
- @Test
- public void testCreateConnector()
- {
- Plugin plugin = new VerticaPlugin();
- ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
- factory.create("test", ImmutableMap.of("connection-url", "jdbc:vertica://test"), new TestingConnectorContext()).shutdown();
- }
-}
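The factory check above only verifies that the connector can be instantiated and shut down. For end-to-end tests, a plugin like this is typically registered with a QueryRunner and exposed as a catalog; a minimal sketch under that assumption (the connection properties are illustrative, not taken from VerticaQueryRunner):

    // Register the plugin and create a catalog backed by it
    void registerVertica(QueryRunner queryRunner)
    {
        queryRunner.installPlugin(new VerticaPlugin());
        queryRunner.createCatalog(
                "vertica", // catalog name visible to queries
                "vertica", // connector name registered by VerticaPlugin
                ImmutableMap.of("connection-url", "jdbc:vertica://example.net:5433/test_db"));
    }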
diff --git a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaTableStatistics.java b/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaTableStatistics.java
deleted file mode 100644
index 060d5c4e2ebf..000000000000
--- a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaTableStatistics.java
+++ /dev/null
@@ -1,415 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import io.trino.plugin.jdbc.BaseJdbcTableStatisticsTest;
-import io.trino.spi.connector.ColumnHandle;
-import io.trino.spi.statistics.ColumnStatistics;
-import io.trino.spi.statistics.Estimate;
-import io.trino.spi.statistics.TableStatistics;
-import io.trino.testing.MaterializedRow;
-import io.trino.testing.QueryRunner;
-import io.trino.testing.TestingMetadata.TestingColumnHandle;
-import io.trino.testing.sql.TestTable;
-import io.trino.tpch.TpchTable;
-import org.assertj.core.api.InstanceOfAssertFactories;
-import org.assertj.core.api.SoftAssertions;
-import org.intellij.lang.annotations.Language;
-import org.junit.jupiter.api.Test;
-
-import java.util.List;
-import java.util.Optional;
-import java.util.function.Consumer;
-
-import static io.trino.plugin.vertica.TestingVerticaServer.LATEST_VERSION;
-import static io.trino.plugin.vertica.VerticaQueryRunner.TPCH_SCHEMA;
-import static io.trino.testing.sql.TestTable.fromColumns;
-import static java.lang.String.format;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-import static org.assertj.core.api.Assertions.from;
-import static org.assertj.core.api.Assertions.withinPercentage;
-import static org.junit.jupiter.api.Assumptions.abort;
-
-public class TestVerticaTableStatistics
- extends BaseJdbcTableStatisticsTest
-{
- private TestingVerticaServer verticaServer;
-
- @Override
- protected QueryRunner createQueryRunner()
- throws Exception
- {
- // Use the latest image to avoid "Must be superuser to run export_statistics"
- verticaServer = closeAfterClass(new TestingVerticaServer(LATEST_VERSION));
- return VerticaQueryRunner.builder(verticaServer)
- .addConnectorProperty("statistics.enabled", "true")
- .setTables(ImmutableList.of(TpchTable.ORDERS, TpchTable.REGION, TpchTable.NATION))
- .build();
- }
-
- @Override
- protected void checkEmptyTableStats(String tableName)
- {
- assertQuery(
- "SHOW STATS FOR " + tableName,
- "VALUES " +
- "('orderkey', null, null, null, null, null, null)," +
- "('custkey', null, null, null, null, null, null)," +
- "('orderpriority', null, null, null, null, null, null)," +
- "('comment', null, null, null, null, null, null)," +
- "(null, null, null, null, null, null, null)");
- }
-
- @Test
- @Override
- public void testNotAnalyzed()
- {
- String tableName = "test_stats_not_analyzed";
- assertUpdate("DROP TABLE IF EXISTS " + tableName);
- computeActual(format("CREATE TABLE %s AS SELECT * FROM tpch.tiny.orders", tableName));
- try {
- assertQuery(
- "SHOW STATS FOR " + tableName,
- "VALUES " +
- "('orderkey', null, null, null, null, null, null)," +
- "('custkey', null, null, null, null, null, null)," +
- "('orderstatus', null, null, null, null, null, null)," +
- "('totalprice', null, null, null, null, null, null)," +
- "('orderdate', null, null, null, null, null, null)," +
- "('orderpriority', null, null, null, null, null, null)," +
- "('clerk', null, null, null, null, null, null)," +
- "('shippriority', null, null, null, null, null, null)," +
- "('comment', null, null, null, null, null, null)," +
- "(null, null, null, null, null, null, null)");
- }
- finally {
- assertUpdate("DROP TABLE " + tableName);
- }
- }
-
- @Test
- @Override
- public void testBasic()
- {
- String tableName = "test_stats_orders";
- assertUpdate("DROP TABLE IF EXISTS " + tableName);
- computeActual(format("CREATE TABLE %s AS SELECT * FROM tpch.tiny.orders", tableName));
- try {
- gatherStats(tableName);
- assertThat(showStats(tableName))
- .get()
- .returns(Estimate.of(15000), from(TableStatistics::getRowCount))
- .extracting(TableStatistics::getColumnStatistics, InstanceOfAssertFactories.map(ColumnHandle.class, ColumnStatistics.class))
- .hasEntrySatisfying(handle("orderkey"), statsCloseTo(null, 15000, 0, 1.0, 60000.0))
- .hasEntrySatisfying(handle("custkey"), statsCloseTo(null, 1000, 0, 1.0, 1499.0))
- .hasEntrySatisfying(handle("orderstatus"), statsCloseTo(9605.0, 3, 0, null, null))
- .hasEntrySatisfying(handle("totalprice"), statsCloseTo(null, 14996, 0, 874.89, 466001.0))
- .hasEntrySatisfying(handle("orderdate"), statsCloseTo(null, 2401, 0, null, null))
- .hasEntrySatisfying(handle("orderpriority"), statsCloseTo(28284.0, 5, 0, null, null))
- .hasEntrySatisfying(handle("clerk"), statsCloseTo(56521.0, 1000, 0, null, null))
- .hasEntrySatisfying(handle("shippriority"), statsCloseTo(null, 1, 0, 0.0, 0.0))
- .hasEntrySatisfying(handle("comment"), statsCloseTo(289736.0, 14995, 0, null, null));
- }
- finally {
- assertUpdate("DROP TABLE " + tableName);
- }
- }
-
- @Test
- @Override
- public void testAllNulls()
- {
- String tableName = "test_stats_table_all_nulls";
- assertUpdate("DROP TABLE IF EXISTS " + tableName);
- computeActual(format("CREATE TABLE %s AS SELECT orderkey, custkey, orderpriority, comment FROM tpch.tiny.orders WHERE false", tableName));
- try {
- computeActual(format("INSERT INTO %s (orderkey) VALUES NULL, NULL, NULL", tableName));
- gatherStats(tableName);
- assertQuery(
- "SHOW STATS FOR " + tableName,
- "VALUES " +
- "('orderkey', 0, 0, 1, null, null, null)," +
- "('custkey', 0, 0, 1, null, null, null)," +
- "('orderpriority', 0, 0, 1, null, null, null)," +
- "('comment', 0, 0, 1, null, null, null)," +
- "(null, null, null, null, 3, null, null)");
- }
- finally {
- assertUpdate("DROP TABLE " + tableName);
- }
- }
-
- @Test
- @Override
- public void testNullsFraction()
- {
- String tableName = "test_stats_table_with_nulls";
- assertUpdate("DROP TABLE IF EXISTS " + tableName);
- assertUpdate("" +
- "CREATE TABLE " + tableName + " AS " +
- "SELECT " +
- " orderkey, " +
- " if(orderkey % 3 = 0, NULL, custkey) custkey, " +
- " if(orderkey % 5 = 0, NULL, orderpriority) orderpriority " +
- "FROM tpch.tiny.orders",
- 15000);
- try {
- gatherStats(tableName);
- assertThat(showStats(tableName))
- .get()
- .returns(Estimate.of(15000), from(TableStatistics::getRowCount))
- .extracting(TableStatistics::getColumnStatistics, InstanceOfAssertFactories.map(ColumnHandle.class, ColumnStatistics.class))
- .hasEntrySatisfying(handle("orderkey"), statsCloseTo(null, 15000, 0, 1.0, 60000.0))
- .hasEntrySatisfying(handle("custkey"), statsCloseTo(null, 1000, 0.3333333333333333, null, 1499.0))
- .hasEntrySatisfying(handle("orderpriority"), statsCloseTo(28284.0, 5, 0.2, null, null));
- }
- finally {
- assertUpdate("DROP TABLE " + tableName);
- }
- }
-
- @Test
- @Override
- public void testAverageColumnLength()
- {
- abort("Vertica connector does not report average column length");
- }
-
- @Test
- @Override
- public void testPartitionedTable()
- {
- String tableName = "tpch.test_stats_orders_part";
- assertUpdate("DROP TABLE IF EXISTS " + tableName);
- assertUpdate(format("CREATE TABLE %s AS SELECT * FROM orders", tableName), 15000);
- onVertica(format("ALTER TABLE %s PARTITION BY YEAR(orderdate) REORGANIZE", tableName));
- try {
- gatherStats(tableName);
- assertThat(showStats(tableName))
- .get()
- .returns(Estimate.of(15000), from(TableStatistics::getRowCount))
- .extracting(TableStatistics::getColumnStatistics, InstanceOfAssertFactories.map(ColumnHandle.class, ColumnStatistics.class))
- .hasEntrySatisfying(handle("orderkey"), statsCloseTo(null, 15000, 0, 1.0, 60000.0))
- .hasEntrySatisfying(handle("custkey"), statsCloseTo(null, 1000, 0, 1.0, 1499.0))
- .hasEntrySatisfying(handle("orderstatus"), statsCloseTo(1529.0, 3, 0, null, null)) // Note that the data size is different from non-partitioned tables
- .hasEntrySatisfying(handle("totalprice"), statsCloseTo(null, 14996, 0, 874.89, 466001.0))
- .hasEntrySatisfying(handle("orderdate"), statsCloseTo(null, 2401, 0, null, null))
- .hasEntrySatisfying(handle("orderpriority"), statsCloseTo(28284.0, 5, 0, null, null))
- .hasEntrySatisfying(handle("clerk"), statsCloseTo(56521.0, 1000, 0, null, null))
- .hasEntrySatisfying(handle("shippriority"), statsCloseTo(null, 1, 0, 0.0, 0.0))
- .hasEntrySatisfying(handle("comment"), statsCloseTo(289736.0, 14995, 0, null, null));
- }
- finally {
- assertUpdate("DROP TABLE " + tableName);
- }
- }
-
- @Test
- @Override
- public void testView()
- {
- String tableName = "tpch.test_stats_view";
- onVertica("CREATE OR REPLACE VIEW " + tableName + " AS SELECT orderkey, custkey, orderpriority, \"COMMENT\" FROM tpch.orders");
- try {
- assertQuery(
- "SHOW STATS FOR " + tableName,
- "VALUES " +
- "('orderkey', null, null, null, null, null, null)," +
- "('custkey', null, null, null, null, null, null)," +
- "('orderpriority', null, null, null, null, null, null)," +
- "('comment', null, null, null, null, null, null)," +
- "(null, null, null, null, null, null, null)");
- }
- finally {
- onVertica("DROP VIEW " + tableName);
- }
- }
-
- @Test
- @Override
- public void testMaterializedView()
- {
- abort("Vertica does not have Materialized Views");
- }
-
- @Override
- protected void testCaseColumnNames(String tableName)
- {
- onVertica("" +
- "CREATE TABLE tpch." + tableName + " " +
- "AS SELECT " +
- " orderkey AS CASE_UNQUOTED_UPPER, " +
- " custkey AS case_unquoted_lower, " +
- " orderstatus AS cASe_uNQuoTeD_miXED, " +
- " totalprice AS \"CASE_QUOTED_UPPER\", " +
- " orderdate AS \"case_quoted_lower\"," +
- " orderpriority AS \"CasE_QuoTeD_miXED\" " +
- "FROM tpch.orders");
- try {
- gatherStats(tableName);
- assertThat(showStats(tableName))
- .get()
- .returns(Estimate.of(15000), from(TableStatistics::getRowCount))
- .extracting(TableStatistics::getColumnStatistics, InstanceOfAssertFactories.map(ColumnHandle.class, ColumnStatistics.class))
- .hasEntrySatisfying(handle("case_unquoted_upper"), statsCloseTo(null, 15000, 0, 1.0, 60000.0))
- .hasEntrySatisfying(handle("case_unquoted_lower"), statsCloseTo(null, 1000, 0, 1.0, 1499.0))
- .hasEntrySatisfying(handle("case_unquoted_mixed"), statsCloseTo(9605.0, 3, 0, null, null))
- .hasEntrySatisfying(handle("case_quoted_upper"), statsCloseTo(null, 14996, 0, 874.89, 466001.0))
- .hasEntrySatisfying(handle("case_quoted_lower"), statsCloseTo(null, 2401, 0, null, null))
- .hasEntrySatisfying(handle("case_quoted_mixed"), statsCloseTo(28284.0, 5, 0, null, null));
- }
- finally {
- onVertica("DROP TABLE tpch." + tableName);
- }
- }
-
- @Test
- @Override
- public void testStatsWithAggregationPushdown()
- {
- assertThatThrownBy(super::testStatsWithAggregationPushdown)
- .hasMessageContaining("Plan does not match");
- abort("Aggregate pushdown is unsupported in Vertica connector");
- }
-
- @Test
- @Override
- public void testStatsWithTopNPushdown()
- {
- assertThatThrownBy(super::testStatsWithTopNPushdown)
- .hasMessageContaining("Plan does not match");
- abort("TopN pushdown is unsupported in Vertica connector");
- }
-
- @Test
- @Override
- public void testNumericCornerCases()
- {
- try (TestTable table = fromColumns(
- getQueryRunner()::execute,
- "test_numeric_corner_cases_",
- ImmutableMap.<String, List<String>>builder()
- .put("only_negative_infinity double", List.of("-infinity()", "-infinity()", "-infinity()", "-infinity()"))
- .put("only_positive_infinity double", List.of("infinity()", "infinity()", "infinity()", "infinity()"))
- .put("mixed_infinities double", List.of("-infinity()", "infinity()", "-infinity()", "infinity()"))
- .put("mixed_infinities_and_numbers double", List.of("-infinity()", "infinity()", "-5.0", "7.0"))
- .put("nans_only double", List.of("nan()", "nan()"))
- .put("nans_and_numbers double", List.of("nan()", "nan()", "-5.0", "7.0"))
- .put("nans_and_numbers_and_null double", List.of("nan()", "10.0"))
- .put("large_doubles double", List.of("CAST(-50371909150609548946090.0 AS DOUBLE)", "CAST(50371909150609548946090.0 AS DOUBLE)")) // 2^77 DIV 3
- .put("short_decimals_big_fraction decimal(16,15)", List.of("-1.234567890123456", "1.234567890123456"))
- .put("short_decimals_big_integral decimal(16,1)", List.of("-123456789012345.6", "123456789012345.6"))
- .put("long_decimals_big_fraction decimal(38,37)", List.of("-1.2345678901234567890123456789012345678", "1.2345678901234567890123456789012345678"))
- .put("long_decimals_middle decimal(38,16)", List.of("-1234567890123456.7890123456789012345678", "1234567890123456.7890123456789012345678"))
- .put("long_decimals_big_integral decimal(38,1)", List.of("-1234567890123456789012345678901234567.8", "1234567890123456789012345678901234567.8"))
- .buildOrThrow(),
- "null")) {
- gatherStats(table.getName());
- assertThat(showStats(table.getName()))
- .get()
- .returns(Estimate.of(4), from(TableStatistics::getRowCount))
- .extracting(TableStatistics::getColumnStatistics, InstanceOfAssertFactories.map(ColumnHandle.class, ColumnStatistics.class))
- .hasEntrySatisfying(handle("only_negative_infinity"), statsCloseTo(null, 1, 0, null, null))
- .hasEntrySatisfying(handle("only_positive_infinity"), statsCloseTo(null, 1, 0, null, null))
- .hasEntrySatisfying(handle("mixed_infinities"), statsCloseTo(null, 2, 0, null, null))
- .hasEntrySatisfying(handle("mixed_infinities_and_numbers"), statsCloseTo(null, 4, 0, null, null))
- .hasEntrySatisfying(handle("nans_only"), statsCloseTo(null, 2, 0.1, null, null)) // nulls faction is 0.1 (unknown) because we can't calculate it when the rows are only nan and null
- .hasEntrySatisfying(handle("nans_and_numbers"), statsCloseTo(null, 3, 0, null, null))
- .hasEntrySatisfying(handle("nans_and_numbers_and_null"), statsCloseTo(null, 3, 0, null, null))
- .hasEntrySatisfying(handle("large_doubles"), statsCloseTo(null, 2, 0.5, -5.03719E22, null))
- .hasEntrySatisfying(handle("short_decimals_big_fraction"), statsCloseTo(null, 2, 0.5, null, 1.234567890123456))
- .hasEntrySatisfying(handle("short_decimals_big_integral"), statsCloseTo(null, 2, 0.5, null, 123456789012345.6))
- .hasEntrySatisfying(handle("long_decimals_big_fraction"), statsCloseTo(null, 2, 0.5, null, 1.2345678901234567890123456789012345678))
- .hasEntrySatisfying(handle("long_decimals_middle"), statsCloseTo(null, 2, 0.5, null, 1234567890123456.7890123456789012345678))
- .hasEntrySatisfying(handle("long_decimals_big_integral"), statsCloseTo(null, 2, 0.5, null, 1234567890123456789012345678901234567.8));
- }
- }
-
- @Override
- protected void gatherStats(String tableName)
- {
- onVertica(format("SELECT ANALYZE_STATISTICS('%s.%s')", TPCH_SCHEMA, tableName));
- }
-
- private void onVertica(@Language("SQL") String sql)
- {
- verticaServer.execute(sql);
- }
-
- private Optional<TableStatistics> showStats(String tableName)
- {
- List<MaterializedRow> showStatsResult = computeActual("SHOW STATS FOR " + tableName).getMaterializedRows();
- double rowCount = (double) showStatsResult.get(showStatsResult.size() - 1).getField(4);
-
- TableStatistics.Builder tableStatistics = TableStatistics.builder();
- tableStatistics.setRowCount(Estimate.of(rowCount));
-
- for (MaterializedRow materializedRow : showStatsResult) {
- if (materializedRow.getField(0) != null) {
- ColumnStatistics statistics = ColumnStatistics.builder()
- .setDataSize(asEstimate(materializedRow.getField(1)))
- .setDistinctValuesCount(Estimate.of((Double) materializedRow.getField(2)))
- .setNullsFraction(Estimate.of((Double) materializedRow.getField(3)))
- .build();
-
- tableStatistics.setColumnStatistics(
- handle(String.valueOf(materializedRow.getField(0))),
- statistics);
- }
- }
- return Optional.of(tableStatistics.build());
- }
-
- private ColumnHandle handle(String name)
- {
- return new TestingColumnHandle(name);
- }
-
- private Estimate asEstimate(Object value)
- {
- if (value == null) {
- return Estimate.unknown();
- }
- return Estimate.of((Double) value);
- }
-
- private static Consumer<ColumnStatistics> statsCloseTo(Double dataSize, double distinctValues, double nullsFraction, Double lowValue, Double highValue)
- {
- return stats -> {
- SoftAssertions softly = new SoftAssertions();
-
- if (dataSize != null) {
- softly.assertThat(stats.getDataSize().getValue())
- .isCloseTo(dataSize, withinPercentage(80.0));
- }
-
- assertThat(stats.getDistinctValuesCount().getValue()).isEqualTo(distinctValues);
- assertThat(stats.getNullsFraction().getValue()).isEqualTo(nullsFraction);
-
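- // Note: the showStats helper above never populates a range, so this branch is
- // defensive and the isEmpty assertion after it always holds in these tests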
- if (stats.getRange().isPresent()) {
- softly.assertThat(stats.getRange().get().getMin())
- .isCloseTo(lowValue, withinPercentage(80.0));
- softly.assertThat(stats.getRange().get().getMax())
- .isCloseTo(highValue, withinPercentage(80.0));
- }
-
- softly.assertThat(stats.getRange()).isEmpty();
- softly.assertAll();
- };
- }
-}
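For reference, Trino's SHOW STATS output has seven columns per row: column_name, data_size, distinct_values_count, nulls_fraction, row_count, low_value, and high_value. The showStats helper above reads fields 0-3 of each column row plus field 4 of the trailing summary row, and drops low_value and high_value entirely (hence the range note in statsCloseTo). A sketch of the per-column mapping it performs:

    // One SHOW STATS column row mapped to ColumnStatistics; fields 5 and 6
    // (low_value, high_value) are deliberately ignored, mirroring showStats
    ColumnStatistics statistics = ColumnStatistics.builder()
            .setDataSize(asEstimate(row.getField(1)))
            .setDistinctValuesCount(Estimate.of((Double) row.getField(2)))
            .setNullsFraction(Estimate.of((Double) row.getField(3)))
            .build();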
diff --git a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaTypeMapping.java b/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaTypeMapping.java
deleted file mode 100644
index bdbc397a641e..000000000000
--- a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestVerticaTypeMapping.java
+++ /dev/null
@@ -1,625 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import io.trino.Session;
-import io.trino.plugin.jdbc.UnsupportedTypeHandling;
-import io.trino.testing.AbstractTestQueryFramework;
-import io.trino.testing.MaterializedResult;
-import io.trino.testing.QueryRunner;
-import io.trino.testing.datatype.CreateAndInsertDataSetup;
-import io.trino.testing.datatype.CreateAsSelectDataSetup;
-import io.trino.testing.datatype.DataSetup;
-import io.trino.testing.datatype.SqlDataTypeTest;
-import io.trino.testing.sql.SqlExecutor;
-import io.trino.testing.sql.TestTable;
-import io.trino.testing.sql.TrinoSqlExecutor;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.MethodSource;
-
-import java.math.RoundingMode;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.time.ZoneId;
-import java.util.Random;
-
-import static io.trino.plugin.jdbc.DecimalConfig.DecimalMapping.ALLOW_OVERFLOW;
-import static io.trino.plugin.jdbc.DecimalConfig.DecimalMapping.STRICT;
-import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.DECIMAL_DEFAULT_SCALE;
-import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.DECIMAL_MAPPING;
-import static io.trino.plugin.jdbc.DecimalSessionSessionProperties.DECIMAL_ROUNDING_MODE;
-import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.UNSUPPORTED_TYPE_HANDLING;
-import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR;
-import static io.trino.spi.type.BigintType.BIGINT;
-import static io.trino.spi.type.BooleanType.BOOLEAN;
-import static io.trino.spi.type.CharType.createCharType;
-import static io.trino.spi.type.DateType.DATE;
-import static io.trino.spi.type.DecimalType.createDecimalType;
-import static io.trino.spi.type.DoubleType.DOUBLE;
-import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
-import static io.trino.spi.type.TimeZoneKey.getTimeZoneKey;
-import static io.trino.spi.type.VarbinaryType.VARBINARY;
-import static io.trino.spi.type.VarcharType.createVarcharType;
-import static java.lang.String.format;
-import static java.math.RoundingMode.HALF_UP;
-import static java.math.RoundingMode.UNNECESSARY;
-import static java.util.Arrays.asList;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
-
-public class TestVerticaTypeMapping
- extends AbstractTestQueryFramework
-{
- private TestingVerticaServer verticaServer;
-
- @Override
- protected QueryRunner createQueryRunner()
- throws Exception
- {
- verticaServer = closeAfterClass(new TestingVerticaServer());
- // Increase max statement size to test large varbinary write
- return VerticaQueryRunner.builder(verticaServer)
- .addExtraProperties(ImmutableMap.of("query.max-length", "65000000"))
- .build();
- }
-
- @Test
- public void testBasicTypes()
- {
- SqlDataTypeTest.create()
- .addRoundTrip("boolean", "true", BOOLEAN)
- .addRoundTrip("boolean", "false", BOOLEAN)
- // All integer types in Vertica are BIGINT
- .addRoundTrip("tinyint", "5", BIGINT, "BIGINT '5'")
- .addRoundTrip("smallint", "32456", BIGINT, "BIGINT '32456'")
- .addRoundTrip("integer", "123456789", BIGINT, "BIGINT '123456789'")
- .addRoundTrip("bigint", "123456789012", BIGINT)
- .execute(getQueryRunner(), trinoCreateAsSelect("test_basic_types"));
- }
-
- @Test
- public void testBigintMinValue()
- {
- // -2^63 is a valid Trino value, but Vertica reserves it for NULL, so writing it is rejected as out of range
- assertUpdate("CREATE TABLE test_bigint_invalid_value (col BIGINT)");
- assertQueryFails("INSERT INTO test_bigint_invalid_value VALUES (CAST(POWER(-2, 63) AS BIGINT))", ".*Value \"-9223372036854775808\" is out of range.*");
- assertUpdate("DROP TABLE test_bigint_invalid_value");
- }
-
- @Test
- public void testReal()
- {
- SqlDataTypeTest.create()
- .addRoundTrip("real", "NULL", DOUBLE, "CAST(NULL AS double)")
- .addRoundTrip("real", "3.14", DOUBLE, "DOUBLE '3.14'")
- .addRoundTrip("real", "3.1415927", DOUBLE, "DOUBLE '3.1415927'")
- .addRoundTrip("real", "'NaN'::real", DOUBLE, "CAST(nan() AS double)")
- .addRoundTrip("real", "'-Infinity'::real", DOUBLE, "CAST(-infinity() AS double)")
- .addRoundTrip("real", "'+Infinity'::real", DOUBLE, "CAST(+infinity() AS double)")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.vertica_test_real"));
-
- SqlDataTypeTest.create()
- .addRoundTrip("real", "NULL", DOUBLE, "CAST(NULL AS double)")
- .addRoundTrip("real", "3.14", DOUBLE, "DOUBLE '3.14'")
- .addRoundTrip("real", "3.1415927", DOUBLE, "DOUBLE '3.1415927'")
- .addRoundTrip("real", "nan()", DOUBLE, "CAST(nan() AS double)")
- .addRoundTrip("real", "-infinity()", DOUBLE, "CAST(-infinity() AS double)")
- .addRoundTrip("real", "+infinity()", DOUBLE, "CAST(+infinity() AS double)")
- .execute(getQueryRunner(), trinoCreateAsSelect("trino_test_real"));
- }
-
- @Test
- public void testDouble()
- {
- SqlDataTypeTest.create()
- .addRoundTrip("double precision", "NULL", DOUBLE, "CAST(NULL AS double)")
- .addRoundTrip("double precision", "1.0E100", DOUBLE, "1.0E100")
- .addRoundTrip("double precision", "'NaN'::double precision", DOUBLE, "nan()")
- .addRoundTrip("double precision", "'+Infinity'::double precision", DOUBLE, "+infinity()")
- .addRoundTrip("double precision", "'-Infinity'::double precision", DOUBLE, "-infinity()")
- .addRoundTrip("double precision", "1234567890123.123::double precision", DOUBLE, "DOUBLE '1234567890123.123'")
- .addRoundTrip("double precision", "9999999999999.999::double precision", DOUBLE, "DOUBLE '9999999999999.999'")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.vertica_test_double"));
-
- SqlDataTypeTest.create()
- .addRoundTrip("double", "NULL", DOUBLE, "CAST(NULL AS double)")
- .addRoundTrip("double", "1.0E100", DOUBLE, "1.0E100")
- .addRoundTrip("double", "nan()", DOUBLE, "nan()")
- .addRoundTrip("double", "+infinity()", DOUBLE, "+infinity()")
- .addRoundTrip("double", "-infinity()", DOUBLE, "-infinity()")
- .addRoundTrip("double", "1234567890123.123", DOUBLE, "DOUBLE '1234567890123.123'")
- // Vertica is rounding that value
- .addRoundTrip("double", "9999999999999.999", DOUBLE, "DOUBLE '10000000000000.0'")
- .execute(getQueryRunner(), trinoCreateAsSelect("trino_test_double"));
- }
-
- @Test
- public void testDecimal()
- {
- SqlDataTypeTest.create()
- .addRoundTrip("decimal(3, 0)", "NULL", createDecimalType(3, 0), "CAST(NULL AS decimal(3, 0))")
- .addRoundTrip("decimal(3, 0)", "CAST('193' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('193' AS decimal(3, 0))")
- .addRoundTrip("decimal(3, 0)", "CAST('19' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('19' AS decimal(3, 0))")
- .addRoundTrip("decimal(3, 0)", "CAST('-193' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('-193' AS decimal(3, 0))")
- .addRoundTrip("decimal(3, 1)", "CAST('10.0' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))")
- .addRoundTrip("decimal(3, 1)", "CAST('10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.1' AS decimal(3, 1))")
- .addRoundTrip("decimal(3, 1)", "CAST('-10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('-10.1' AS decimal(3, 1))")
- .addRoundTrip("decimal(4, 2)", "CAST('2' AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2' AS decimal(4, 2))")
- .addRoundTrip("decimal(4, 2)", "CAST('2.3' AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2.3' AS decimal(4, 2))")
- .addRoundTrip("decimal(24, 2)", "CAST('2' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2' AS decimal(24, 2))")
- .addRoundTrip("decimal(24, 2)", "CAST('2.3' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2.3' AS decimal(24, 2))")
- .addRoundTrip("decimal(24, 2)", "CAST('123456789.3' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('123456789.3' AS decimal(24, 2))")
- .addRoundTrip("decimal(24, 4)", "CAST('12345678901234567890.31' AS decimal(24, 4))", createDecimalType(24, 4), "CAST('12345678901234567890.31' AS decimal(24, 4))")
- .addRoundTrip("decimal(30, 5)", "CAST('3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('3141592653589793238462643.38327' AS decimal(30, 5))")
- .addRoundTrip("decimal(30, 5)", "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))")
- .addRoundTrip("decimal(38, 0)", "CAST('27182818284590452353602874713526624977' AS decimal(38, 0))", createDecimalType(38, 0), "CAST('27182818284590452353602874713526624977' AS decimal(38, 0))")
- .addRoundTrip("decimal(38, 0)", "CAST('-27182818284590452353602874713526624977' AS decimal(38, 0))", createDecimalType(38, 0), "CAST('-27182818284590452353602874713526624977' AS decimal(38, 0))")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_decimal"))
- .execute(getQueryRunner(), trinoCreateAsSelect("test_decimal"));
- }
-
- @Test
- public void testDecimalExceedingPrecisionMaxIgnored()
- {
- testUnsupportedDataTypeAsIgnored("decimal(50,0)", "12345678901234567890123456789012345678901234567890");
- }
-
- @Test
- public void testDecimalExceedingPrecisionMaxConvertedToVarchar()
- {
- testUnsupportedDataTypeConvertedToVarchar(
- getSession(),
- "decimal(50,0)",
- "12345678901234567890123456789012345678901234567890",
- "'12345678901234567890123456789012345678901234567890'");
- }
-
- @Test
- public void testDecimalExceedingPrecisionMaxWithExceedingIntegerValues()
- {
- try (TestTable testTable = new TestTable(
- onRemoteDatabase(),
- "tpch.test_exceeding_max_decimal",
- "(d_col decimal(65,25))",
- asList("1234567890123456789012345678901234567890.123456789", "-1234567890123456789012345678901234567890.123456789"))) {
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 0),
- format("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_schema||'.'||table_name = '%s'", testTable.getName()),
- "VALUES ('d_col', 'decimal(38,0)')");
- assertQueryFails(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 0),
- "SELECT d_col FROM " + testTable.getName(),
- "Rounding necessary");
- assertQueryFails(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 0),
- "SELECT d_col FROM " + testTable.getName(),
- "Decimal overflow");
- assertQuery(
- sessionWithDecimalMappingStrict(CONVERT_TO_VARCHAR),
- format("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_schema||'.'||table_name = '%s'", testTable.getName()),
- "VALUES ('d_col', 'varchar')");
- assertQuery(
- sessionWithDecimalMappingStrict(CONVERT_TO_VARCHAR),
- "SELECT d_col FROM " + testTable.getName(),
- "VALUES ('1234567890123456789012345678901234567890.1234567890000000000000000'), ('-1234567890123456789012345678901234567890.1234567890000000000000000')");
- }
- }
-
- @Test
- public void testDecimalExceedingPrecisionMaxWithNonExceedingIntegerValues()
- {
- try (TestTable testTable = new TestTable(
- onRemoteDatabase(),
- "tpch.test_exceeding_max_decimal",
- "(d_col decimal(60,20))",
- asList("123456789012345678901234567890.123456789012345", "-123456789012345678901234567890.123456789012345"))) {
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 0),
- format("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_schema||'.'||table_name = '%s'", testTable.getName()),
- "VALUES ('d_col', 'decimal(38,0)')");
- assertQueryFails(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 0),
- "SELECT d_col FROM " + testTable.getName(),
- "Rounding necessary");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 0),
- "SELECT d_col FROM " + testTable.getName(),
- "VALUES (123456789012345678901234567890), (-123456789012345678901234567890)");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 8),
- format("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_schema||'.'||table_name = '%s'", testTable.getName()),
- "VALUES ('d_col', 'decimal(38,8)')");
- assertQueryFails(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 8),
- "SELECT d_col FROM " + testTable.getName(),
- "Rounding necessary");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 8),
- "SELECT d_col FROM " + testTable.getName(),
- "VALUES (123456789012345678901234567890.12345679), (-123456789012345678901234567890.12345679)");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 22),
- format("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_schema||'.'||table_name = '%s'", testTable.getName()),
- "VALUES ('d_col', 'decimal(38,20)')");
- assertQueryFails(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 20),
- "SELECT d_col FROM " + testTable.getName(),
- "Decimal overflow");
- assertQueryFails(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 9),
- "SELECT d_col FROM " + testTable.getName(),
- "Decimal overflow");
- assertQuery(
- sessionWithDecimalMappingStrict(CONVERT_TO_VARCHAR),
- format("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_schema||'.'||table_name = '%s'", testTable.getName()),
- "VALUES ('d_col', 'varchar')");
- assertQuery(
- sessionWithDecimalMappingStrict(CONVERT_TO_VARCHAR),
- "SELECT d_col FROM " + testTable.getName(),
- "VALUES ('123456789012345678901234567890.12345678901234500000'), ('-123456789012345678901234567890.12345678901234500000')");
- }
- }
-
- @ParameterizedTest
- @MethodSource("testDecimalExceedingPrecisionMaxProvider")
- public void testDecimalExceedingPrecisionMaxWithSupportedValues(int typePrecision, int typeScale)
- {
- try (TestTable testTable = new TestTable(
- onRemoteDatabase(),
- "tpch.test_exceeding_max_decimal",
- format("(d_col decimal(%d,%d))", typePrecision, typeScale),
- asList("12.01", "-12.01", "123", "-123", "1.12345678", "-1.12345678"))) {
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 0),
- format("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_schema||'.'||table_name = '%s'", testTable.getName()),
- "VALUES ('d_col', 'decimal(38,0)')");
- assertQueryFails(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 0),
- "SELECT d_col FROM " + testTable.getName(),
- "Rounding necessary");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 0),
- "SELECT d_col FROM " + testTable.getName(),
- "VALUES (12), (-12), (123), (-123), (1), (-1)");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 3),
- format("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_schema||'.'||table_name = '%s'", testTable.getName()),
- "VALUES ('d_col', 'decimal(38,3)')");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 3),
- "SELECT d_col FROM " + testTable.getName(),
- "VALUES (12.01), (-12.01), (123), (-123), (1.123), (-1.123)");
- assertQueryFails(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 3),
- "SELECT d_col FROM " + testTable.getName(),
- "Rounding necessary");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 8),
- format("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'tpch' AND table_schema||'.'||table_name = '%s'", testTable.getName()),
- "VALUES ('d_col', 'decimal(38,8)')");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 8),
- "SELECT d_col FROM " + testTable.getName(),
- "VALUES (12.01), (-12.01), (123), (-123), (1.12345678), (-1.12345678)");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(HALF_UP, 9),
- "SELECT d_col FROM " + testTable.getName(),
- "VALUES (12.01), (-12.01), (123), (-123), (1.12345678), (-1.12345678)");
- assertQuery(
- sessionWithDecimalMappingAllowOverflow(UNNECESSARY, 8),
- "SELECT d_col FROM " + testTable.getName(),
- "VALUES (12.01), (-12.01), (123), (-123), (1.12345678), (-1.12345678)");
- }
- }
-
- public Object[][] testDecimalExceedingPrecisionMaxProvider()
- {
- return new Object[][] {
- {40, 8},
- {50, 10},
- };
- }
-
- @Test
- public void testChar()
- {
- SqlDataTypeTest.create()
- .addRoundTrip("char", "'a'", createCharType(1), "CAST('a' AS char(1))")
- .addRoundTrip("char(10)", "'text_a'", createCharType(10), "CAST('text_a' AS char(10))")
- .addRoundTrip("char(255)", "'text_b'", createCharType(255), "CAST('text_b' AS char(255))")
- .addRoundTrip("char(15)", "'攻殻機動隊'", createCharType(15), "CAST('攻殻機動隊' AS char(15))")
- .addRoundTrip("char(32)", "'攻殻機動隊'", createCharType(32), "CAST('攻殻機動隊' AS char(32))")
- .addRoundTrip("char(4)", "'😂'", createCharType(4), "CAST('😂' AS char(4))")
- .addRoundTrip("char(77)", "'Ну, погоди!'", createCharType(77), "CAST('Ну, погоди!' AS char(77))")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_char"))
- .execute(getQueryRunner(), trinoCreateAsSelect("test_char"));
- }
-
- @Test
- public void testVarchar()
- {
- SqlDataTypeTest.create()
- .addRoundTrip("varchar(10)", "'text_a'", createVarcharType(10), "CAST('text_a' AS varchar(10))")
- .addRoundTrip("varchar(255)", "'text_b'", createVarcharType(255), "CAST('text_b' AS varchar(255))")
- .addRoundTrip("varchar(15)", "'攻殻機動隊'", createVarcharType(15), "CAST('攻殻機動隊' AS varchar(15))")
- .addRoundTrip("varchar(32)", "'攻殻機動隊'", createVarcharType(32), "CAST('攻殻機動隊' AS varchar(32))")
- .addRoundTrip("varchar(4)", "'😂'", createVarcharType(4), "CAST('😂' AS varchar(4))")
- .addRoundTrip("varchar(77)", "'Ну, погоди!'", createVarcharType(77), "CAST('Ну, погоди!' AS varchar(77))")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_varchar"))
- .execute(getQueryRunner(), trinoCreateAsSelect("test_varchar"));
-
- SqlDataTypeTest.create()
- .addRoundTrip("varchar", "'text_default'", createVarcharType(80), "CAST('text_default' AS varchar(80))")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_default_varchar"));
-
- SqlDataTypeTest.create()
- .addRoundTrip("varchar", "'text_default'", createVarcharType(1048576), "CAST('text_default' AS varchar(1048576))")
- .execute(getQueryRunner(), trinoCreateAsSelect("test_default_varchar"));
-
- // Test Vertica LONG VARCHAR mapped to Trino VARCHAR
- SqlDataTypeTest.create()
- .addRoundTrip("long varchar(10)", "'text_a'::LONG VARCHAR", createVarcharType(10), "CAST('text_a' AS varchar(10))")
- .addRoundTrip("long varchar(255)", "'text_b'::LONG VARCHAR", createVarcharType(255), "CAST('text_b' AS varchar(255))")
- .addRoundTrip("long varchar(15)", "'攻殻機動隊'::LONG VARCHAR", createVarcharType(15), "CAST('攻殻機動隊' AS varchar(15))")
- .addRoundTrip("long varchar(32)", "'攻殻機動隊'::LONG VARCHAR", createVarcharType(32), "CAST('攻殻機動隊' AS varchar(32))")
- .addRoundTrip("long varchar(4)", "'😂'::LONG VARCHAR", createVarcharType(4), "CAST('😂' AS varchar(4))")
- .addRoundTrip("long varchar(77)", "'Ну, погоди!'::LONG VARCHAR", createVarcharType(77), "CAST('Ну, погоди!' AS varchar(77))")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_long_varchar"));
-
- SqlDataTypeTest.create()
- .addRoundTrip("long varchar", "'text_default'::LONG VARCHAR", createVarcharType(1048576), "CAST('text_default' AS varchar(1048576))")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_default_long_varchar"));
- }
-
- @Test
- public void testVarbinary()
- {
- SqlDataTypeTest.create()
- .addRoundTrip("varbinary", "NULL", VARBINARY, "CAST(NULL AS VARBINARY)")
- .addRoundTrip("varbinary", "X''", VARBINARY, "X''")
- .addRoundTrip("varbinary", "X'000000000000'", VARBINARY, "X'000000000000'")
- .addRoundTrip("varbinary", "X'68656C6C6F'", VARBINARY, "to_utf8('hello')")
- .addRoundTrip("varbinary", "X'5069C4996B6E6120C582C4856B61207720E69DB1E4BAACE983BD'", VARBINARY, "to_utf8('Piękna łąka w 東京都')")
- .addRoundTrip("varbinary", "X'4261672066756C6C206F6620F09F92B0'", VARBINARY, "to_utf8('Bag full of 💰')")
- .addRoundTrip("varbinary", "X'0001020304050607080DF9367AA7000000'", VARBINARY, "X'0001020304050607080DF9367AA7000000'")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_varbinary"))
- .execute(getQueryRunner(), trinoCreateAsSelect("test_varbinary"));
-
- // Test Vertica LONG VARBINARY mapped to Trino VARBINARY
- SqlDataTypeTest.create()
- .addRoundTrip("long varbinary", "NULL", VARBINARY, "CAST(NULL AS VARBINARY)")
- .addRoundTrip("long varbinary", "X''", VARBINARY, "X''")
- .addRoundTrip("long varbinary", "X'000000000000'", VARBINARY, "X'000000000000'")
- .addRoundTrip("long varbinary", "X'68656C6C6F'", VARBINARY, "to_utf8('hello')")
- .addRoundTrip("long varbinary", "X'5069C4996B6E6120C582C4856B61207720E69DB1E4BAACE983BD'", VARBINARY, "to_utf8('Piękna łąka w 東京都')")
- .addRoundTrip("long varbinary", "X'4261672066756C6C206F6620F09F92B0'", VARBINARY, "to_utf8('Bag full of 💰')")
- .addRoundTrip("long varbinary", "X'0001020304050607080DF9367AA7000000'", VARBINARY, "X'0001020304050607080DF9367AA7000000'")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_long_varbinary"));
- }
-
- @Test
- public void testMaxLengthVarbinary()
- throws SQLException
- {
- byte[] maxLengthVarbinary = new byte[1_000_000];
- byte[] varbinaryOverflow = new byte[1_000_001];
-
- Random random = new Random();
- random.nextBytes(maxLengthVarbinary);
- random.nextBytes(varbinaryOverflow);
-
- try (Connection connection = DriverManager.getConnection(verticaServer.getJdbcUrl(), verticaServer.getUsername(), verticaServer.getPassword());
- Statement statement = connection.createStatement()) {
- statement.executeUpdate("CREATE TABLE tpch.test_max_varbinary (col LONG VARBINARY(1000000))");
-
- // Insert maximum allowable value
- try (PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO tpch.test_max_varbinary VALUES (?)")) {
- preparedStatement.setBytes(1, maxLengthVarbinary);
- preparedStatement.executeUpdate();
- }
- }
-
- // Assert read correctness
- query("SELECT * FROM test_max_varbinary")
- .assertThat()
- .result()
- .matches(MaterializedResult.resultBuilder(getSession(), VARBINARY)
- .row(new Object[] {maxLengthVarbinary})
- .build());
-
- // Test overflow insert from Trino to Vertica
- String url = format("jdbc:trino://%s/vertica/tpch", getDistributedQueryRunner().getCoordinator().getAddress());
- try (Connection connection = DriverManager.getConnection(url, "user", null);
- PreparedStatement statement = connection.prepareStatement("INSERT INTO test_max_varbinary VALUES (?)")) {
- try {
- statement.setBytes(1, varbinaryOverflow);
- statement.executeUpdate();
- throw new AssertionError("Expected the oversized INSERT to fail");
- }
- catch (SQLException ex) {
- // Assert we get a helpful error message when trying to insert data that is too large for LONG VARBINARY
- assertThat(ex.getMessage()).contains("ERROR: String of 1000001 octets is too long for type Long Varbinary(1000000) for column col");
- }
- }
-
- assertUpdate("DROP TABLE test_max_varbinary");
- }
-
- @Test
- public void testDate()
- {
- SqlDataTypeTest dateTests = SqlDataTypeTest.create()
- .addRoundTrip("DATE", "NULL", DATE, "CAST(NULL AS DATE)")
- .addRoundTrip("DATE", "DATE '0001-01-01'", DATE, "DATE '0001-01-01'")
- // julian->gregorian switch
- .addRoundTrip("DATE", "DATE '1582-10-04'", DATE, "DATE '1582-10-04'")
- // .addRoundTrip("DATE", "DATE '1582-10-05'", DATE, "DATE '1582-10-05'") // Vertica plus 10 days
- // .addRoundTrip("DATE", "DATE '1582-10-14'", DATE, "DATE '1582-10-14'") // Vertica plus 10 days
- .addRoundTrip("DATE", "DATE '1582-10-15'", DATE, "DATE '1582-10-15'")
- // before epoch
- .addRoundTrip("DATE", "DATE '1952-04-03'", DATE, "DATE '1952-04-03'")
- .addRoundTrip("DATE", "DATE '1970-01-01'", DATE, "DATE '1970-01-01'")
- .addRoundTrip("DATE", "DATE '1970-02-03'", DATE, "DATE '1970-02-03'")
- // summer on northern hemisphere (possible DST)
- .addRoundTrip("DATE", "DATE '2017-07-01'", DATE, "DATE '2017-07-01'")
- // winter on northern hemisphere
- // (possible DST on southern hemisphere)
- .addRoundTrip("DATE", "DATE '2017-01-01'", DATE, "DATE '2017-01-01'")
- //.addRoundTrip("DATE", "DATE '1983-04-01'", DATE, "DATE '1983-04-01'")
- .addRoundTrip("DATE", "DATE '1983-10-01'", DATE, "DATE '1983-10-01'")
- // some large dates
- .addRoundTrip("DATE", "DATE '9999-12-31'", DATE, "DATE '9999-12-31'")
- .addRoundTrip("DATE", "DATE '5881580-07-11'", DATE, "DATE '5881580-07-11'"); // The epoch is integer max
-
- for (String timeZoneId : ImmutableList.of(UTC_KEY.getId(), ZoneId.systemDefault().getId(), ZoneId.of("Europe/Vilnius").getId())) {
- Session session = Session.builder(getSession())
- .setTimeZoneKey(getTimeZoneKey(timeZoneId))
- .build();
- dateTests.execute(getQueryRunner(), session, verticaCreateAndInsert("tpch.test_date"));
- dateTests.execute(getQueryRunner(), session, trinoCreateAsSelect("test_date"));
- dateTests.execute(getQueryRunner(), session, trinoCreateAndInsert(getSession(), "test_date"));
- }
- }
-
- @Test
- public void testLowestDate()
- {
- // Testing '-5877641-06-23', whose epoch day is Integer.MIN_VALUE, since Vertica's low value (25e+15 BC) is out of range in Trino
- SqlDataTypeTest.create()
- // Vertica doesn't accept '-yyyy-MM-dd' format for writing BC dates
- // Vertica isn't based on astronomical year numbering, so '5877641-06-23 BC' is equal to '-5877641-06-23'
- .addRoundTrip("DATE", "'5877641-06-23 BC'", DATE, "DATE '-5877641-06-23'")
- .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_lowest_date"));
-
- SqlDataTypeTest.create()
- .addRoundTrip("DATE", "'-5877641-06-23'", DATE, "DATE '-5877641-06-23'")
- .execute(getQueryRunner(), trinoCreateAsSelect("tpch.test_lowest_date"));
- }
-
- private void testUnsupportedDataTypeAsIgnored(String dataTypeName, String databaseValue)
- {
- testUnsupportedDataTypeAsIgnored(getSession(), dataTypeName, databaseValue);
- }
-
- private void testUnsupportedDataTypeAsIgnored(Session session, String dataTypeName, String databaseValue)
- {
- try (TestTable table = new TestTable(
- verticaServer.getSqlExecutor(),
- "tpch.unsupported_type",
- format("(key varchar(5), unsupported_column %s)", dataTypeName),
- ImmutableList.of(
- "'1', NULL",
- "'2', " + databaseValue))) {
- assertQuery(session, "SELECT * FROM " + table.getName(), "VALUES 1, 2");
- assertQuery(
- session,
- "DESC " + table.getName(),
- "VALUES ('key', 'varchar(5)','', '')"); // no 'unsupported_column'
-
- assertUpdate(session, format("INSERT INTO %s VALUES '3'", table.getName()), 1);
- assertQuery(session, "SELECT * FROM " + table.getName(), "VALUES '1', '2', '3'");
- }
- }
-
- private void testUnsupportedDataTypeConvertedToVarchar(Session session, String dataTypeName, String databaseValue, String trinoValue)
- {
- try (TestTable table = new TestTable(
- onRemoteDatabase(),
- "tpch.unsupported_type",
- format("(key varchar(5), unsupported_column %s)", dataTypeName),
- ImmutableList.of(
- "1, NULL",
- "2, " + databaseValue))) {
- Session convertToVarchar = Session.builder(session)
- .setCatalogSessionProperty("vertica", UNSUPPORTED_TYPE_HANDLING, CONVERT_TO_VARCHAR.name())
- .build();
- assertQuery(
- convertToVarchar,
- "SELECT * FROM " + table.getName(),
- format("VALUES ('1', NULL), ('2', %s)", trinoValue));
- assertQuery(
- convertToVarchar,
- format("SELECT key FROM %s WHERE unsupported_column = %s", table.getName(), trinoValue),
- "VALUES '2'");
- assertQuery(
- convertToVarchar,
- "DESC " + table.getName(),
- "VALUES " +
- "('key', 'varchar(5)', '', ''), " +
- "('unsupported_column', 'varchar', '', '')");
- assertQueryFails(
- convertToVarchar,
- format("INSERT INTO %s (key, unsupported_column) VALUES (3, NULL)", table.getName()),
- ".*Insert query has mismatched column types: Table: \\[varchar\\(5\\), varchar\\], Query: \\[integer, unknown\\]");
- assertQueryFails(
- convertToVarchar,
- format("INSERT INTO %s (key, unsupported_column) VALUES (4, %s)", table.getName(), trinoValue),
- ".*Insert query has mismatched column types: Table: \\[varchar\\(5\\), varchar\\], Query: \\[integer, varchar\\(\\d+\\)\\]");
- assertUpdate(
- convertToVarchar,
- format("INSERT INTO %s (key) VALUES '5'", table.getName()),
- 1);
- assertQuery(
- convertToVarchar,
- "SELECT * FROM " + table.getName(),
- format("VALUES ('1', NULL), ('2', %s), ('5', NULL)", trinoValue));
- }
- }
-
- private Session sessionWithDecimalMappingAllowOverflow(RoundingMode roundingMode, int scale)
- {
- return Session.builder(getSession())
- .setCatalogSessionProperty("vertica", DECIMAL_MAPPING, ALLOW_OVERFLOW.name())
- .setCatalogSessionProperty("vertica", DECIMAL_ROUNDING_MODE, roundingMode.name())
- .setCatalogSessionProperty("vertica", DECIMAL_DEFAULT_SCALE, Integer.valueOf(scale).toString())
- .build();
- }
-
- private Session sessionWithDecimalMappingStrict(UnsupportedTypeHandling unsupportedTypeHandling)
- {
- return Session.builder(getSession())
- .setCatalogSessionProperty("vertica", DECIMAL_MAPPING, STRICT.name())
- .setCatalogSessionProperty("vertica", UNSUPPORTED_TYPE_HANDLING, unsupportedTypeHandling.name())
- .build();
- }
-
- private DataSetup trinoCreateAsSelect(String tableNamePrefix)
- {
- return trinoCreateAsSelect(getSession(), tableNamePrefix);
- }
-
- private DataSetup trinoCreateAsSelect(Session session, String tableNamePrefix)
- {
- return new CreateAsSelectDataSetup(new TrinoSqlExecutor(getQueryRunner(), session), tableNamePrefix);
- }
-
- private DataSetup trinoCreateAndInsert(Session session, String tableNamePrefix)
- {
- return new CreateAndInsertDataSetup(new TrinoSqlExecutor(getQueryRunner(), session), tableNamePrefix);
- }
-
- private DataSetup verticaCreateAndInsert(String tableNamePrefix)
- {
- return new CreateAndInsertDataSetup(onRemoteDatabase(), tableNamePrefix);
- }
-
- private SqlExecutor onRemoteDatabase()
- {
- return verticaServer.getSqlExecutor();
- }
-}
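The deleted type-mapping tests above all follow SqlDataTypeTest's round-trip pattern: each `addRoundTrip` pairs a Vertica column type and literal with the Trino type and literal expected on read-back, and each `execute` replays the full case list against one table-creation path, so a single test covers both tables created natively in Vertica and tables created through Trino. A minimal sketch of that pattern, reusing the helper names defined at the bottom of the deleted class (the `test_round_trip` table prefix is illustrative):

```java
// Sketch only: one round-trip case, executed against both creation paths.
SqlDataTypeTest.create()
        // (remote type, remote literal, expected Trino type, expected Trino literal)
        .addRoundTrip("varchar(10)", "'text_a'", createVarcharType(10), "CAST('text_a' AS varchar(10))")
        // Path 1: table created and populated directly in Vertica, then read through Trino
        .execute(getQueryRunner(), verticaCreateAndInsert("tpch.test_round_trip"))
        // Path 2: table created via Trino CTAS, exercising the write-side mapping
        .execute(getQueryRunner(), trinoCreateAsSelect("test_round_trip"));
```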
diff --git a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestingVerticaServer.java b/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestingVerticaServer.java
deleted file mode 100644
index 9ad171d6678c..000000000000
--- a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/TestingVerticaServer.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import com.google.common.collect.ImmutableSet;
-import io.trino.plugin.base.util.AutoCloseableCloser;
-import io.trino.testing.ResourcePresence;
-import io.trino.testing.sql.SqlExecutor;
-import org.junit.jupiter.api.Assumptions;
-import org.testcontainers.containers.JdbcDatabaseContainer;
-import org.testcontainers.utility.DockerImageName;
-import org.testcontainers.utility.MountableFile;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.time.Duration;
-import java.util.Set;
-
-import static io.trino.testing.containers.TestContainers.startOrReuse;
-import static java.lang.String.format;
-import static java.util.Objects.requireNonNull;
-
-/**
- * @see Vertica product lifecycle
- */
-public class TestingVerticaServer
- extends JdbcDatabaseContainer<TestingVerticaServer>
-{
- public static final String LATEST_VERSION = "23.4.0-0";
- public static final String DEFAULT_VERSION = "11.0.0-0";
-
- public static final Integer PORT = 5433;
-
- public static final String DATABASE = "tpch";
- private static final String USER = "test_user";
- private static final String PASSWORD = "test_password";
-
- private final AutoCloseableCloser closer = AutoCloseableCloser.create();
- private final String database;
- private final String user;
- private final String password;
-
- public TestingVerticaServer()
- {
- this(DEFAULT_VERSION, DATABASE, USER, PASSWORD);
- }
-
- public TestingVerticaServer(String version)
- {
- this(version, DATABASE, USER, PASSWORD);
- }
-
- public TestingVerticaServer(String version, String database, String user, String password)
- {
- super(DockerImageName.parse("vertica/vertica-ce").withTag(version));
- Assumptions.abort("Disabled until Vertica image is fixed");
- this.database = requireNonNull(database, "database is null");
- this.user = requireNonNull(user, "user is null");
- this.password = requireNonNull(password, "password is null");
- withStartupAttempts(3);
- withStartupTimeoutSeconds((int) Duration.ofMinutes(10).toSeconds());
- closer.register(startOrReuse(this));
- execute(format("GRANT ALL ON DATABASE %s TO %s", database, user), "dbadmin", null);
- }
-
- @Override
- public Set<Integer> getLivenessCheckPortNumbers()
- {
- return ImmutableSet.of(getMappedPort(PORT));
- }
-
- @Override
- protected void configure()
- {
- addExposedPort(PORT);
- addEnv("VERTICA_DB_NAME", database);
- addEnv("APP_DB_USER", user);
- addEnv("APP_DB_PASSWORD", password);
- withCopyFileToContainer(MountableFile.forClasspathResource("vmart_define_schema.sql"), "/opt/vertica/examples/VMart_Schema");
- withCopyFileToContainer(MountableFile.forClasspathResource("vmart_load_data.sql"), "/opt/vertica/examples/VMart_Schema");
- setStartupAttempts(3);
- }
-
- @Override
- public String getDriverClassName()
- {
- return "io.trino.plugin.vertica.VerticaDriver";
- }
-
- @Override
- public String getUsername()
- {
- return user;
- }
-
- @Override
- public String getPassword()
- {
- return password;
- }
-
- @Override
- public String getJdbcUrl()
- {
- return format("jdbc:vertica://%s:%s/%s", getHost(), getMappedPort(PORT), database);
- }
-
- @Override
- public String getTestQueryString()
- {
- return "SELECT 1";
- }
-
- @Override
- public void close()
- {
- try {
- closer.close();
- }
- catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void execute(String sql)
- {
- execute(sql, getUsername(), getPassword());
- }
-
- public void execute(String sql, String user, String password)
- {
- try (Connection connection = DriverManager.getConnection(getJdbcUrl(), user, password);
- Statement statement = connection.createStatement()) {
- statement.execute(sql);
- }
- catch (SQLException e) {
- throw new RuntimeException(e);
- }
- }
-
- public SqlExecutor getSqlExecutor()
- {
- return new SqlExecutor()
- {
- @Override
- public void execute(String sql)
- {
- TestingVerticaServer.this.execute(sql);
- }
-
- @Override
- public boolean supportsMultiRowInsert()
- {
- return false;
- }
- };
- }
-
- @ResourcePresence
- @Override
- public boolean isRunning()
- {
- return getContainerId() != null;
- }
-}
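TestingVerticaServer was a standard Testcontainers `JdbcDatabaseContainer`: its constructor registered `startOrReuse(this)`, so the container was already running once construction returned, and `close()` tore it down. Note the `Assumptions.abort` call in the constructor, which had disabled every dependent test before this removal. A hedged sketch of the intended usage (requires a local Docker daemon, and assumes the abort call is gone):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class VerticaServerSketch
{
    public static void main(String[] args)
            throws Exception
    {
        // The constructor starts (or reuses) the container; close() stops it.
        try (TestingVerticaServer server = new TestingVerticaServer();
                Connection connection = DriverManager.getConnection(
                        server.getJdbcUrl(), server.getUsername(), server.getPassword());
                Statement statement = connection.createStatement()) {
            statement.execute("SELECT 1");
        }
    }
}
```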
diff --git a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/VerticaQueryRunner.java b/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/VerticaQueryRunner.java
deleted file mode 100644
index 8adb125b773b..000000000000
--- a/plugin/trino-vertica/src/test/java/io/trino/plugin/vertica/VerticaQueryRunner.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.plugin.vertica;
-
-import com.google.common.collect.ImmutableList;
-import com.google.errorprone.annotations.CanIgnoreReturnValue;
-import io.airlift.log.Logger;
-import io.airlift.log.Logging;
-import io.trino.Session;
-import io.trino.plugin.jmx.JmxPlugin;
-import io.trino.plugin.tpch.TpchPlugin;
-import io.trino.spi.security.Identity;
-import io.trino.testing.DistributedQueryRunner;
-import io.trino.tpch.TpchTable;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static io.airlift.testing.Closeables.closeAllSuppress;
-import static io.trino.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME;
-import static io.trino.testing.QueryAssertions.copyTpchTables;
-import static io.trino.testing.TestingSession.testSessionBuilder;
-import static java.util.Objects.requireNonNull;
-
-public final class VerticaQueryRunner
-{
- private VerticaQueryRunner() {}
-
- public static final String GRANTED_USER = "alice";
- public static final String NON_GRANTED_USER = "bob";
- public static final String TPCH_SCHEMA = "tpch";
-
- public static Builder builder(TestingVerticaServer server)
- {
- return new Builder(server)
- .addConnectorProperty("connection-url", requireNonNull(server.getJdbcUrl(), "jdbcUrl is null"))
- .addConnectorProperty("connection-user", requireNonNull(server.getUsername(), "user is null"))
- .addConnectorProperty("connection-password", requireNonNull(server.getPassword(), "password is null"));
- }
-
- public static final class Builder
- extends DistributedQueryRunner.Builder<Builder>
- {
- private final TestingVerticaServer server;
- private List<TpchTable<?>> tables = ImmutableList.of();
- private final Map<String, String> connectorProperties = new HashMap<>();
-
- private Builder(TestingVerticaServer server)
- {
- super(testSessionBuilder()
- .setCatalog("vertica")
- .setSchema(TPCH_SCHEMA)
- .build());
- this.server = requireNonNull(server, "server is null");
- }
-
- @CanIgnoreReturnValue
- public Builder addConnectorProperty(String key, String value)
- {
- connectorProperties.put(key, value);
- return this;
- }
-
- @CanIgnoreReturnValue
- public Builder setTables(Iterable<TpchTable<?>> tables)
- {
- this.tables = ImmutableList.copyOf(requireNonNull(tables, "tables is null"));
- return this;
- }
-
- @Override
- public DistributedQueryRunner build()
- throws Exception
- {
- DistributedQueryRunner queryRunner = super.build();
- try {
- queryRunner.installPlugin(new JmxPlugin());
- queryRunner.createCatalog("jmx", "jmx");
-
- queryRunner.installPlugin(new TpchPlugin());
- queryRunner.createCatalog(TPCH_SCHEMA, TPCH_SCHEMA);
-
- // Create two roles, one of which is granted access to the TPCH database/schema
- executeAsAdmin(server, "CREATE SCHEMA IF NOT EXISTS tpch");
- executeAsAdmin(server, "CREATE ROLE " + GRANTED_USER);
- executeAsAdmin(server, "CREATE ROLE " + NON_GRANTED_USER);
- executeAsAdmin(server, "GRANT ALL PRIVILEGES ON DATABASE tpch TO " + GRANTED_USER);
- executeAsAdmin(server, "GRANT ALL PRIVILEGES ON SCHEMA tpch TO " + GRANTED_USER);
-
- // Allow the user to set the roles
- executeAsAdmin(server, "GRANT " + GRANTED_USER + " TO " + server.getUsername());
- executeAsAdmin(server, "GRANT " + NON_GRANTED_USER + " TO " + server.getUsername());
-
- queryRunner.installPlugin(new VerticaPlugin());
- queryRunner.createCatalog("vertica", "vertica", connectorProperties);
-
- copyTpchTables(queryRunner, TPCH_SCHEMA, TINY_SCHEMA_NAME, createSession(GRANTED_USER, "vertica"), tables);
-
- // Revoke the server user's direct access to the database if impersonation is enabled.
- // This lets impersonation work as intended in tests, since Vertica roles add to a user's existing permissions.
- // Without the revoke, queries run with the NON_GRANTED_USER role would still succeed, because the user in the JDBC connection has access to the tables.
- if (Boolean.parseBoolean(connectorProperties.getOrDefault("vertica.impersonation.enabled", "false"))) {
- executeAsAdmin(server, "REVOKE ALL ON SCHEMA tpch FROM " + server.getUsername());
- executeAsAdmin(server, "REVOKE ALL ON DATABASE tpch FROM " + server.getUsername());
- }
-
- return queryRunner;
- }
- catch (Throwable e) {
- closeAllSuppress(e, queryRunner);
- throw e;
- }
- }
- }
-
- public static Session createSession(String user, String catalogName)
- {
- return testSessionBuilder()
- .setCatalog(catalogName)
- .setSchema(TPCH_SCHEMA)
- .setIdentity(Identity.ofUser(user))
- .build();
- }
-
- private static void executeAsAdmin(TestingVerticaServer server, String sql)
- {
- server.execute(sql, "dbadmin", null);
- }
-
- public static void main(String[] args)
- throws Exception
- {
- Logging.initialize();
-
- DistributedQueryRunner queryRunner = builder(new TestingVerticaServer())
- .addCoordinatorProperty("http-server.http.port", "8080")
- .setTables(TpchTable.getTables())
- .build();
-
- Logger log = Logger.get(VerticaQueryRunner.class);
- log.info("======== SERVER STARTED ========");
- log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
- }
-}
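The most subtle part of the deleted runner is the impersonation setup: Vertica roles are additive to a user's own grants, so `build()` revokes the JDBC user's direct schema and database access when `vertica.impersonation.enabled` is set, as role-based tests could otherwise never observe a denial. A hedged sketch of how that path was exercised, using only the property name and helpers from the deleted code above:

```java
// Sketch only: stand up a runner with impersonation on, then query as a
// Trino identity that maps to the GRANTED_USER Vertica role.
DistributedQueryRunner queryRunner = VerticaQueryRunner.builder(new TestingVerticaServer())
        .addConnectorProperty("vertica.impersonation.enabled", "true")
        .setTables(TpchTable.getTables())
        .build();

Session alice = VerticaQueryRunner.createSession(VerticaQueryRunner.GRANTED_USER, "vertica");
queryRunner.execute(alice, "SELECT count(*) FROM nation");
```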
diff --git a/plugin/trino-vertica/src/test/resources/vmart_define_schema.sql b/plugin/trino-vertica/src/test/resources/vmart_define_schema.sql
deleted file mode 100644
index 7c029d27445c..000000000000
--- a/plugin/trino-vertica/src/test/resources/vmart_define_schema.sql
+++ /dev/null
@@ -1 +0,0 @@
--- Don't prepare VMart example database to speed up startup time
diff --git a/plugin/trino-vertica/src/test/resources/vmart_load_data.sql b/plugin/trino-vertica/src/test/resources/vmart_load_data.sql
deleted file mode 100644
index 7c029d27445c..000000000000
--- a/plugin/trino-vertica/src/test/resources/vmart_load_data.sql
+++ /dev/null
@@ -1 +0,0 @@
--- Don't prepare VMart example database to speed up startup time
diff --git a/pom.xml b/pom.xml
index 9fb7d20ea1d2..34ba9ab0cf15 100644
--- a/pom.xml
+++ b/pom.xml
@@ -119,7 +119,6 @@
        <module>plugin/trino-thrift-testing-server</module>
        <module>plugin/trino-tpcds</module>
        <module>plugin/trino-tpch</module>
-        <module>plugin/trino-vertica</module>
        <module>service/trino-proxy</module>
        <module>service/trino-verifier</module>
        <module>testing/trino-benchmark-queries</module>
@@ -1558,12 +1557,6 @@
                <version>${project.version}</version>
            </dependency>

-            <dependency>
-                <groupId>io.trino</groupId>
-                <artifactId>trino-vertica</artifactId>
-                <version>${project.version}</version>
-            </dependency>
-
            <dependency>
                <groupId>io.trino</groupId>
                <artifactId>trino-web-ui</artifactId>
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
index ee3de89bd9f0..c15fbdf8bb52 100644
--- a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
@@ -76,8 +76,7 @@ public void extendEnvironment(Environment.Builder builder)
"snowflake",
"sqlserver",
"tpcds",
- "trino_thrift",
- "vertica")
+ "trino_thrift")
.forEach(connector -> builder.addConnector(
connector,
forHostPath(configDir.getPath(connector + ".properties"))));
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-all/vertica.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-all/vertica.properties
deleted file mode 100644
index bf1195451ddb..000000000000
--- a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-all/vertica.properties
+++ /dev/null
@@ -1,4 +0,0 @@
-connector.name=vertica
-connection-url=jdbc:vertica://host1.invalid:5433
-connection-user=root
-connection-password=secret